Example #1
def flatten_experiments(filename_object_list):
    """
    Flatten a list of experiment lists

    :param filename_object_list: The parameter item
    :return: The flattened experiment lists
    """
    from dxtbx.model.experiment_list import ExperimentList

    result = ExperimentList()
    for o in filename_object_list:
        result.extend(o.data)
    return result
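A minimal usage sketch for the flattener above. ParsedFile is a hypothetical stand-in for the PHIL file-object wrappers that flatten_experiments() actually receives; the only contract assumed is a .data attribute holding an ExperimentList.

# Hypothetical usage sketch; ParsedFile is not a real dxtbx/PHIL class.
from dxtbx.model.experiment_list import ExperimentList


class ParsedFile(object):
    def __init__(self, experiments):
        self.data = experiments  # an ExperimentList, as flatten_experiments expects


parsed = [ParsedFile(ExperimentList()), ParsedFile(ExperimentList())]
flat = flatten_experiments(parsed)
assert len(flat) == sum(len(p.data) for p in parsed)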
Example #2
def flatten_experiments(filename_object_list):
    """
    Flatten a list of experiment lists

    :param filename_object_list: The parameter item
    :return: The flattened experiment lists
    """
    from dxtbx.model.experiment_list import ExperimentList
    result = ExperimentList()
    for i in range(len(filename_object_list)):
        result.extend(filename_object_list[i].data)
    return result
Example #3
def save_experiments(filename):
    from xia2.Schema import imageset_cache
    from dxtbx.model.experiment_list import ExperimentList
    from dxtbx.model.experiment_list import ExperimentListFactory
    from dxtbx.serialize import dump

    experiments = ExperimentList([])
    for imagesets in imageset_cache.values():
        for imageset in imagesets.values():
            experiments.extend(
                ExperimentListFactory.from_imageset_and_crystal(
                    imageset, None))

    dump.experiment_list(experiments, filename, compact=True)
Example #4
  def redistribute(self, experiments, reflections, mpi_communicator, number_of_mpi_ranks):
    from dials.array_family import flex
    from dxtbx.model.experiment_list import ExperimentList

    split_experiments = self.divide_list_into_chunks(experiments, number_of_mpi_ranks)

    # if some (but not all!) chunks are empty, we want those empty chunks to be randomly distributed
    number_of_empty_chunks = [len(chunk) for chunk in split_experiments].count(0)
    if number_of_empty_chunks > 0 and len(experiments) != 0:
      import random
      #random.seed(8)
      random.shuffle(split_experiments)

    '''
    self.logger.log("Split experiment list into %d chunks"%len(split_experiments))
    for chunk in split_experiments:
      self.logger.log(len(chunk))
    '''

    split_reflections = []
    for i in range(number_of_mpi_ranks):
      split_reflections.append(flex.reflection_table())
      for experiment_id, experiment in enumerate(split_experiments[i]):
        refls = reflections.select(reflections['exp_id'] == experiment.identifier)
        split_reflections[i].extend(refls)

    self.logger.log("Split experiments and reflections")
    self.logger.log(get_memory_usage())

    del experiments
    del reflections

    self.logger.log("Deleted experiments and reflections")
    self.logger.log(get_memory_usage())

    new_split_experiments = mpi_communicator.alltoall(split_experiments)
    new_split_reflections = mpi_communicator.alltoall(split_reflections)

    self.logger.log("Consolidating data...")

    new_experiments = ExperimentList()
    new_reflections = flex.reflection_table()
    for entry in new_split_experiments:
      new_experiments.extend(entry)
    for entry in new_split_reflections:
      new_reflections.extend(entry)

    self.logger.log(get_memory_usage())

    return new_experiments, new_reflections
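The heart of redistribute() is the all-to-all exchange: each rank splits its local data into one chunk per rank, mpi_communicator.alltoall() delivers chunk i to rank i, and every rank then concatenates what it received. A minimal sketch of that pattern with mpi4py and plain Python lists instead of DIALS types (assumes a working mpi4py; run under mpirun):

# Sketch of the split / alltoall / consolidate pattern from redistribute(),
# using plain lists in place of ExperimentList and reflection_table.
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()

local = list(range(rank * 10, rank * 10 + 10))  # this rank's items

# one chunk per destination rank (round-robin split)
chunks = [local[i::size] for i in range(size)]

# chunk i is sent to rank i; we receive one chunk from every rank
received = comm.alltoall(chunks)

# consolidate, mirroring the extend() loops above
merged = []
for chunk in received:
    merged.extend(chunk)
print("rank %d now holds %d items" % (rank, len(merged)))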
Example #5
    def exchange_experiments_by_alltoall(self, mpi_communicator):
        self.logger.log_step_time("LB_EXPTS_ALL_TO_ALL")
        new_split_experiments = mpi_communicator.alltoall(
            self.split_experiments)
        del self.split_experiments
        self.logger.log_step_time("LB_EXPTS_ALL_TO_ALL", True)

        self.logger.log_step_time("LB_EXPTS_CONSOLIDATE")
        self.logger.log("Consolidating experiments after all-to-all...")
        new_experiments = ExperimentList()
        for entry in new_split_experiments:
            new_experiments.extend(entry)
        del new_split_experiments
        self.logger.log_step_time("LB_EXPTS_CONSOLIDATE", True)

        return new_experiments
Example #6
  def run(self, experiments, reflections):

    # merge reflection intensities: calculate the average and other statistics
    self.logger.log_step_time("AVERAGE")
    self.logger.log("Averaging intensities...")
    all_rank_merged_reflections = self.merging_reflection_table()

    if len(reflections) > 0:
      for hkl_reflection_table in reflection_table_utils.get_next_hkl_reflection_table(reflections):
        intensity_stats = self.calc_reflection_intensity_stats(reflections=hkl_reflection_table)
        intensity_stats['miller_index'] = hkl_reflection_table[0].get('miller_index_asymmetric')
        all_rank_merged_reflections.append(intensity_stats)

    self.logger.log("Merged intensities for %d HKLs"%(all_rank_merged_reflections.size()))
    self.logger.log_step_time("AVERAGE", True)

    # gather all merged intensities at rank 0
    self.logger.log_step_time("GATHER")
    if self.mpi_helper.rank != 0:
      self.logger.log("Executing MPI gathering of all reflection tables at rank 0...")
    all_merged_reflection_tables = self.mpi_helper.comm.gather(all_rank_merged_reflections, root = 0)
    all_experiment_lists = self.mpi_helper.comm.gather(experiments, root = 0)
    #results = self.mpi_helper.comm.gather((experiments, all_rank_merged_reflections), root = 0)
    self.logger.log_step_time("GATHER", True)

    # rank 0: concatenate all merged intensities into the final table
    if self.mpi_helper.rank == 0:
      self.logger.log_step_time("MERGE")
      all_experiments = ExperimentList()
      final_merged_reflection_table = self.merging_reflection_table()
      self.logger.log("Performing final merging of reflection tables and experiments received from all ranks...")
      for table in all_merged_reflection_tables:
        final_merged_reflection_table.extend(table)
      for expts in all_experiment_lists:
        all_experiments.extend(expts)
      self.logger.main_log("Total merged HKLs: {}".format(final_merged_reflection_table.size()))
      self.logger.log_step_time("MERGE", True)

      return all_experiments, final_merged_reflection_table
    else:
      return None, None
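Example #6 depends on the semantics of comm.gather: the root rank receives a list containing every rank's object, while all other ranks get None back, which is why the final merge is guarded by a rank-0 check. A stripped-down sketch of that gather-then-concatenate step (mpi4py, plain lists in place of reflection tables):

# Sketch of the gather-at-root pattern from run() above. Assumes mpi4py.
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()

local_table = ["refl-%d-%d" % (rank, i) for i in range(3)]

# root receives a list of every rank's object; other ranks receive None
all_tables = comm.gather(local_table, root=0)

if rank == 0:
    final = []
    for table in all_tables:
        final.extend(table)
    print("root merged %d entries" % len(final))
else:
    assert all_tables is None  # non-root ranks hold nothing after gather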
Example #7
    def load_data(self):
        from dxtbx.model.experiment_list import ExperimentList
        from dials.array_family import flex
        all_experiments = ExperimentList()
        all_reflections = flex.reflection_table()

        # example showing what reading all the data into a single experiment list/
        # reflection table would look like
        loader = file_loader(self.params)
        for experiments_filename, reflections_filename in loader.filepair_generator():
            experiments, reflections = loader.load_data(
                experiments_filename, reflections_filename)
            sel = (reflections['id'] < 0) | (reflections['id'] >= len(experiments))
            assert sel.count(True) == 0, "Unindexed or invalid reflections found"
            reflections['id'] += len(all_experiments)
            all_reflections.extend(reflections)
            all_experiments.extend(experiments)

        return all_experiments, all_reflections
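The invariant maintained by load_data() is that reflections['id'] always indexes into the combined experiment list, so each file's ids are shifted by the number of experiments accumulated so far before both containers are extended. A sketch of that offset bookkeeping with plain Python values in place of DIALS types:

# Sketch of the id-offset bookkeeping in load_data(); ints and lists stand
# in for experiments and reflection rows.
all_experiments = []
all_reflections = []  # (id, payload) pairs

for experiments, reflections in [
    (["exptA", "exptB"], [(0, "r1"), (1, "r2")]),
    (["exptC"], [(0, "r3")]),
]:
    # every incoming id must point at a valid experiment within its own file
    assert all(0 <= i < len(experiments) for i, _ in reflections)
    offset = len(all_experiments)
    all_reflections.extend((i + offset, p) for i, p in reflections)
    all_experiments.extend(experiments)

# ids now index the combined list: exptA, exptB, exptC
assert [i for i, _ in all_reflections] == [0, 1, 2]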
Example #8
    def index(self):
        # most of this is the same as dials.algorithms.indexing.indexer.indexer_base.index(),
        # with some stills-specific modifications: don't re-index after choosing the best
        # orientation matrix (use the indexing from that step directly), and don't use
        # macrocycles of refinement after indexing.
        # 2017 update: do accept multiple lattices per shot

        experiments = ExperimentList()

        while True:
            self.d_min = self.params.refinement_protocol.d_min_start
            max_lattices = self.params.multiple_lattice_search.max_lattices
            if max_lattices is not None and len(experiments) >= max_lattices:
                break
            if len(experiments) > 0:
                cutoff_fraction = (
                    self.params.multiple_lattice_search.recycle_unindexed_reflections_cutoff
                )
                d_spacings = 1 / self.reflections["rlp"].norms()
                d_min_indexed = flex.min(d_spacings.select(self.indexed_reflections))
                min_reflections_for_indexing = cutoff_fraction * len(
                    self.reflections.select(d_spacings > d_min_indexed)
                )
                crystal_ids = self.reflections.select(d_spacings > d_min_indexed)["id"]
                n_unindexed = (crystal_ids == -1).count(True)
                if n_unindexed < min_reflections_for_indexing:
                    logger.info(
                        "Finish searching for more lattices: %i unindexed reflections remaining."
                        % n_unindexed
                    )
                    break

            n_lattices_previous_cycle = len(experiments)

            # index multiple lattices per shot
            if len(experiments) == 0:
                new = self.find_lattices()
                generate_experiment_identifiers(new)
                experiments.extend(new)
                if len(experiments) == 0:
                    raise DialsIndexError("No suitable lattice could be found.")
            else:
                try:
                    new = self.find_lattices()
                    generate_experiment_identifiers(new)
                    experiments.extend(new)
                except Exception as e:
                    logger.info("Indexing remaining reflections failed")
                    logger.debug(
                        "Indexing remaining reflections failed, exception:\n" + str(e)
                    )

            # reset reflection lattice flags
            # the lattice a given reflection belongs to: a value of -1 indicates
            # that a reflection doesn't belong to any lattice so far
            self.reflections["id"] = flex.int(len(self.reflections), -1)

            self.index_reflections(experiments, self.reflections)

            if len(experiments) == n_lattices_previous_cycle:
                # no more lattices found
                break

            if (
                not self.params.stills.refine_candidates_with_known_symmetry
                and self.params.known_symmetry.space_group is not None
            ):
                self._apply_symmetry_post_indexing(
                    experiments, self.reflections, n_lattices_previous_cycle
                )

            # discard nearly overlapping lattices on the same shot
            if self._check_have_similar_crystal_models(experiments):
                break

            self.indexed_reflections = self.reflections["id"] > -1
            if self.d_min is None:
                sel = self.reflections["id"] <= -1
            else:
                sel = flex.bool(len(self.reflections), False)
                lengths = 1 / self.reflections["rlp"].norms()
                isel = (lengths >= self.d_min).iselection()
                sel.set_selected(isel, True)
                sel.set_selected(self.reflections["id"] > -1, False)
            self.unindexed_reflections = self.reflections.select(sel)

            reflections_for_refinement = self.reflections.select(
                self.indexed_reflections
            )

            if len(self.params.stills.isoforms) > 0:
                logger.info("")
                logger.info("#" * 80)
                logger.info("Starting refinement")
                logger.info("#" * 80)
                logger.info("")

                isoform_experiments = ExperimentList()
                isoform_reflections = flex.reflection_table()
                # Note, changes to params after initial indexing. Cannot use tie to target when fixing the unit cell.
                self.all_params.refinement.reflections.outlier.algorithm = "null"
                self.all_params.refinement.parameterisation.crystal.fix = "cell"
                self.all_params.refinement.parameterisation.crystal.unit_cell.restraints.tie_to_target = (
                    []
                )

                for expt_id, experiment in enumerate(experiments):
                    reflections = reflections_for_refinement.select(
                        reflections_for_refinement["id"] == expt_id
                    )
                    reflections["id"] = flex.int(len(reflections), 0)
                    refiners = []
                    for isoform in self.params.stills.isoforms:
                        iso_experiment = copy.deepcopy(experiment)
                        crystal = iso_experiment.crystal
                        if (
                            isoform.lookup_symbol
                            != crystal.get_space_group().type().lookup_symbol()
                        ):
                            logger.info(
                                "Crystal isoform lookup_symbol %s does not match isoform %s lookup_symbol %s"
                                % (
                                    crystal.get_space_group().type().lookup_symbol(),
                                    isoform.name,
                                    isoform.lookup_symbol,
                                )
                            )
                            continue
                        crystal.set_B(isoform.cell.fractionalization_matrix())

                        logger.info("Refining isoform %s" % isoform.name)
                        refiners.append(
                            e_refine(
                                params=self.all_params,
                                experiments=ExperimentList([iso_experiment]),
                                reflections=reflections,
                                graph_verbose=False,
                            )
                        )

                    if len(refiners) == 0:
                        raise DialsIndexError(
                            "No isoforms had a lookup symbol that matched"
                        )
                    positional_rmsds = [
                        math.sqrt(P.rmsds()[0] ** 2 + P.rmsds()[1] ** 2)
                        for P in refiners
                    ]
                    logger.info(
                        "Positional rmsds for all isoforms:" + str(positional_rmsds)
                    )
                    minrmsd_mm = min(positional_rmsds)
                    minindex = positional_rmsds.index(minrmsd_mm)
                    logger.info(
                        "The smallest rmsd is %5.1f um from isoform %s"
                        % (
                            1000.0 * minrmsd_mm,
                            self.params.stills.isoforms[minindex].name,
                        )
                    )
                    if self.params.stills.isoforms[minindex].rmsd_target_mm is not None:
                        logger.info(
                            "Asserting %f < %f"
                            % (
                                minrmsd_mm,
                                self.params.stills.isoforms[minindex].rmsd_target_mm,
                            )
                        )
                        assert (
                            minrmsd_mm
                            < self.params.stills.isoforms[minindex].rmsd_target_mm
                        )
                    logger.info(
                        "Acceptable rmsd for isoform %s."
                        % (self.params.stills.isoforms[minindex].name)
                    )
                    if len(self.params.stills.isoforms) == 2:
                        logger.info(
                            "Rmsd gain over the other isoform %5.1f um."
                            % (1000.0 * abs(positional_rmsds[0] - positional_rmsds[1]))
                        )
                    R = refiners[minindex]
                    # Now one last check to see if direct beam is out of bounds
                    if self.params.stills.isoforms[minindex].beam_restraint is not None:
                        from scitbx import matrix

                        refined_beam = matrix.col(
                            R.get_experiments()[0]
                            .detector[0]
                            .get_beam_centre_lab(experiments[0].beam.get_s0())[0:2]
                        )
                        known_beam = matrix.col(
                            self.params.stills.isoforms[minindex].beam_restraint
                        )
                        logger.info(
                            "Asserting difference in refined beam center and expected beam center %f < %f"
                            % (
                                (refined_beam - known_beam).length(),
                                self.params.stills.isoforms[minindex].rmsd_target_mm,
                            )
                        )
                        assert (
                            (refined_beam - known_beam).length()
                            < self.params.stills.isoforms[minindex].rmsd_target_mm
                        )
                        # future--circle of confusion could be given as a separate length in mm instead of reusing rmsd_target

                    experiment = R.get_experiments()[0]
                    experiment.crystal.identified_isoform = self.params.stills.isoforms[
                        minindex
                    ].name

                    isoform_experiments.append(experiment)
                    reflections["id"] = flex.int(len(reflections), expt_id)
                    isoform_reflections.extend(reflections)
                experiments = isoform_experiments
                reflections_for_refinement = isoform_reflections

            if self.params.refinement_protocol.mode == "repredict_only":

                from dials.algorithms.indexing.nave_parameters import NaveParameters
                from dials.algorithms.refinement.prediction.managed_predictors import (
                    ExperimentsPredictorFactory,
                )

                refined_experiments, refined_reflections = (
                    experiments,
                    reflections_for_refinement,
                )
                ref_predictor = ExperimentsPredictorFactory.from_experiments(
                    experiments,
                    force_stills=True,
                    spherical_relp=self.all_params.refinement.parameterisation.spherical_relp_model,
                )
                ref_predictor(refined_reflections)
                refined_reflections["delpsical2"] = (
                    refined_reflections["delpsical.rad"] ** 2
                )
                for expt_id in range(len(refined_experiments)):
                    refls = refined_reflections.select(
                        refined_reflections["id"] == expt_id
                    )
                    nv = NaveParameters(
                        params=self.all_params,
                        experiments=refined_experiments[expt_id : expt_id + 1],
                        reflections=refls,
                        refinery=None,
                        graph_verbose=False,
                    )
                    experiments[expt_id].crystal = nv()
                ref_predictor = ExperimentsPredictorFactory.from_experiments(
                    experiments,
                    force_stills=True,
                    spherical_relp=self.all_params.refinement.parameterisation.spherical_relp_model,
                )
                ref_predictor(refined_reflections)

            elif self.params.refinement_protocol.mode is None:
                refined_experiments, refined_reflections = (
                    experiments,
                    reflections_for_refinement,
                )

            else:
                try:
                    refined_experiments, refined_reflections = self.refine(
                        experiments, reflections_for_refinement
                    )
                except Exception as e:
                    s = str(e)
                    if len(experiments) == 1:
                        raise DialsIndexRefineError(str(e))
                    logger.info("Refinement failed:")
                    logger.info(s)
                    del experiments[-1]
                    break

            self._unit_cell_volume_sanity_check(experiments, refined_experiments)

            self.refined_reflections = refined_reflections.select(
                refined_reflections["id"] > -1
            )

            for i, expt in enumerate(self.experiments):
                ref_sel = self.refined_reflections.select(
                    self.refined_reflections["imageset_id"] == i
                )
                ref_sel = ref_sel.select(ref_sel["id"] >= 0)
                for i_expt in set(ref_sel["id"]):
                    refined_expt = refined_experiments[i_expt]
                    expt.detector = refined_expt.detector
                    expt.beam = refined_expt.beam
                    expt.goniometer = refined_expt.goniometer
                    expt.scan = refined_expt.scan
                    refined_expt.imageset = expt.imageset

            if not (
                self.all_params.refinement.parameterisation.beam.fix == "all"
                and self.all_params.refinement.parameterisation.detector.fix == "all"
            ):
                # Experimental geometry may have changed - re-map centroids to
                # reciprocal space
                self.reflections.map_centroids_to_reciprocal_space(self.experiments)

            # update for next cycle
            experiments = refined_experiments
            self.refined_experiments = refined_experiments

        if self.refined_experiments is None:
            raise DialsIndexRefineError("None of the experiments could refine.")

        # discard experiments with zero reflections after refinement
        id_set = set(self.refined_reflections["id"])
        if len(id_set) < len(self.refined_experiments):
            filtered_refined_reflections = flex.reflection_table()
            # iterate in reverse so deletions don't shift the indices of
            # experiments we have yet to visit
            for i in reversed(range(len(self.refined_experiments))):
                if i not in id_set:
                    del self.refined_experiments[i]
            for old, new in zip(sorted(id_set), range(len(id_set))):
                subset = self.refined_reflections.select(
                    self.refined_reflections["id"] == old
                )
                subset["id"] = flex.int(len(subset), new)
                filtered_refined_reflections.extend(subset)
            self.refined_reflections = filtered_refined_reflections

        if len(self.refined_experiments) > 1:
            from dials.algorithms.indexing.compare_orientation_matrices import (
                rotation_matrix_differences,
            )

            logger.info(
                rotation_matrix_differences(self.refined_experiments.crystals())
            )

        logger.info("Final refined crystal models:")
        for i, crystal_model in enumerate(self.refined_experiments.crystals()):
            n_indexed = 0
            for _ in experiments.where(crystal=crystal_model):
                n_indexed += (self.reflections["id"] == i).count(True)
            logger.info("model %i (%i reflections):" % (i + 1, n_indexed))
            logger.info(crystal_model)

        if (
            "xyzcal.mm" in self.refined_reflections
        ):  # won't be there if refine_all_candidates = False and no isoforms

            self._xyzcal_mm_to_px(self.experiments, self.refined_reflections)
Example #9
    def index(self):
        # most of this is the same as dials.algorithms.indexing.indexer.indexer_base.index(),
        # with some stills-specific modifications: don't re-index after choosing the best
        # orientation matrix (use the indexing from that step directly), and don't use
        # macrocycles of refinement after indexing.
        # 2017 update: do accept multiple lattices per shot
        if self.params.refinement_protocol.n_macro_cycles > 1:
            raise Sorry(
                "For stills, please set refinement_protocol.n_macro_cycles = 1"
            )

        experiments = ExperimentList()

        had_refinement_error = False
        have_similar_crystal_models = False

        while True:
            self.d_min = self.params.refinement_protocol.d_min_start
            if had_refinement_error or have_similar_crystal_models:
                break
            max_lattices = self.params.multiple_lattice_search.max_lattices
            if max_lattices is not None and len(experiments) >= max_lattices:
                break
            if len(experiments) > 0:
                cutoff_fraction = \
                  self.params.multiple_lattice_search.recycle_unindexed_reflections_cutoff
                d_spacings = 1 / self.reflections['rlp'].norms()
                d_min_indexed = flex.min(
                    d_spacings.select(self.indexed_reflections))
                min_reflections_for_indexing = \
                  cutoff_fraction * len(self.reflections.select(d_spacings > d_min_indexed))
                crystal_ids = self.reflections.select(
                    d_spacings > d_min_indexed)['id']
                n_unindexed = (crystal_ids == -1).count(True)
                if n_unindexed < min_reflections_for_indexing:
                    logger.info(
                        "Finish searching for more lattices: %i unindexed reflections remaining."
                        % n_unindexed)
                    break

            n_lattices_previous_cycle = len(experiments)

            # index multiple lattices per shot
            if len(experiments) == 0:
                experiments.extend(self.find_lattices())
                if len(experiments) == 0:
                    raise Sorry("No suitable lattice could be found.")
            else:
                try:
                    new = self.find_lattices()
                    experiments.extend(new)
                except Exception as e:
                    logger.info("Indexing remaining reflections failed")
                    logger.debug(
                        "Indexing remaining reflections failed, exception:\n" +
                        str(e))

            # reset reflection lattice flags
            # the lattice a given reflection belongs to: a value of -1 indicates
            # that a reflection doesn't belong to any lattice so far
            self.reflections['id'] = flex.int(len(self.reflections), -1)

            self.index_reflections(experiments, self.reflections)

            if len(experiments) == n_lattices_previous_cycle:
                # no more lattices found
                break

            if not self.params.stills.refine_candidates_with_known_symmetry and self.params.known_symmetry.space_group is not None:
                # now apply the space group symmetry only after the first indexing
                # need to make sure that the symmetrized orientation is similar to the P1 model
                target_space_group = self.target_symmetry_primitive.space_group()
                for i_cryst, cryst in enumerate(experiments.crystals()):
                    if i_cryst >= n_lattices_previous_cycle:
                        new_cryst, cb_op_to_primitive = self.apply_symmetry(
                            cryst, target_space_group)
                        if self.cb_op_primitive_inp is not None:
                            new_cryst = new_cryst.change_basis(
                                self.cb_op_primitive_inp)
                            logger.info(new_cryst.get_space_group().info())
                        cryst.update(new_cryst)
                        cryst.set_space_group(
                            self.params.known_symmetry.space_group.group())
                        for i_expt, expt in enumerate(experiments):
                            if expt.crystal is not cryst:
                                continue
                            if not cb_op_to_primitive.is_identity_op():
                                miller_indices = self.reflections[
                                    'miller_index'].select(
                                        self.reflections['id'] == i_expt)
                                miller_indices = cb_op_to_primitive.apply(
                                    miller_indices)
                                self.reflections['miller_index'].set_selected(
                                    self.reflections['id'] == i_expt,
                                    miller_indices)
                            if self.cb_op_primitive_inp is not None:
                                miller_indices = self.reflections[
                                    'miller_index'].select(
                                        self.reflections['id'] == i_expt)
                                miller_indices = self.cb_op_primitive_inp.apply(
                                    miller_indices)
                                self.reflections['miller_index'].set_selected(
                                    self.reflections['id'] == i_expt,
                                    miller_indices)

            # discard nearly overlapping lattices on the same shot
            if len(experiments) > 1:
                from dials.algorithms.indexing.compare_orientation_matrices \
                     import difference_rotation_matrix_axis_angle
                cryst_b = experiments.crystals()[-1]
                have_similar_crystal_models = False
                for i_a, cryst_a in enumerate(experiments.crystals()[:-1]):
                    R_ab, axis, angle, cb_op_ab = \
                      difference_rotation_matrix_axis_angle(cryst_a, cryst_b)
                    min_angle = self.params.multiple_lattice_search.minimum_angular_separation
                    if abs(angle) < min_angle:  # degrees
                        logger.info(
                            "Crystal models too similar, rejecting crystal %i:"
                            % (len(experiments)))
                        logger.info(
                            "Rotation matrix to transform crystal %i to crystal %i"
                            % (i_a + 1, len(experiments)))
                        logger.info(R_ab)
                        logger.info("Rotation of %.3f degrees" % angle +
                                    " about axis (%.3f, %.3f, %.3f)" % axis)
                        #show_rotation_matrix_differences([cryst_a, cryst_b])
                        have_similar_crystal_models = True
                        del experiments[-1]
                        break
                if have_similar_crystal_models:
                    break

            self.indexed_reflections = (self.reflections['id'] > -1)
            if self.d_min is None:
                sel = self.reflections['id'] <= -1
            else:
                sel = flex.bool(len(self.reflections), False)
                lengths = 1 / self.reflections['rlp'].norms()
                isel = (lengths >= self.d_min).iselection()
                sel.set_selected(isel, True)
                sel.set_selected(self.reflections['id'] > -1, False)
            self.unindexed_reflections = self.reflections.select(sel)

            reflections_for_refinement = self.reflections.select(
                self.indexed_reflections)

            if len(self.params.stills.isoforms) > 0:
                logger.info("")
                logger.info("#" * 80)
                logger.info("Starting refinement")
                logger.info("#" * 80)
                logger.info("")

                import copy
                isoform_experiments = ExperimentList()
                isoform_reflections = flex.reflection_table()
                # Note, changes to params after initial indexing. Cannot use tie to target when fixing the unit cell.
                self.all_params.refinement.reflections.outlier.algorithm = "null"
                self.all_params.refinement.parameterisation.crystal.fix = "cell"
                self.all_params.refinement.parameterisation.crystal.unit_cell.restraints.tie_to_target = []

                for expt_id, experiment in enumerate(experiments):
                    reflections = reflections_for_refinement.select(
                        reflections_for_refinement['id'] == expt_id)
                    reflections['id'] = flex.int(len(reflections), 0)
                    refiners = []
                    for isoform in self.params.stills.isoforms:
                        iso_experiment = copy.deepcopy(experiment)
                        crystal = iso_experiment.crystal
                        if (isoform.lookup_symbol !=
                                crystal.get_space_group().type().lookup_symbol()):
                            logger.info(
                                "Crystal isoform lookup_symbol %s does not match isoform %s lookup_symbol %s"
                                % (crystal.get_space_group().type(
                                ).lookup_symbol(), isoform.name,
                                   isoform.lookup_symbol))
                            continue
                        crystal.set_B(isoform.cell.fractionalization_matrix())

                        logger.info("Refining isoform %s" % isoform.name)
                        refiners.append(
                            e_refine(params=self.all_params,
                                     experiments=ExperimentList(
                                         [iso_experiment]),
                                     reflections=reflections,
                                     graph_verbose=False))

                    if len(refiners) == 0:
                        raise Sorry(
                            "No isoforms had a lookup symbol that matched")
                    positional_rmsds = [
                        math.sqrt(P.rmsds()[0]**2 + P.rmsds()[1]**2)
                        for P in refiners
                    ]
                    logger.info("Positional rmsds for all isoforms:" +
                                str(positional_rmsds))
                    minrmsd_mm = min(positional_rmsds)
                    minindex = positional_rmsds.index(minrmsd_mm)
                    logger.info(
                        "The smallest rmsd is %5.1f um from isoform %s" %
                        (1000. * minrmsd_mm,
                         self.params.stills.isoforms[minindex].name))
                    if self.params.stills.isoforms[
                            minindex].rmsd_target_mm is not None:
                        logger.info("Asserting %f < %f" %
                                    (minrmsd_mm, self.params.stills.
                                     isoforms[minindex].rmsd_target_mm))
                        assert minrmsd_mm < self.params.stills.isoforms[
                            minindex].rmsd_target_mm
                    logger.info("Acceptable rmsd for isoform %s." %
                                (self.params.stills.isoforms[minindex].name))
                    if len(self.params.stills.isoforms) == 2:
                        logger.info(
                            "Rmsd gain over the other isoform %5.1f um." %
                            (1000. *
                             abs(positional_rmsds[0] - positional_rmsds[1])))
                    R = refiners[minindex]
                    # Now one last check to see if direct beam is out of bounds
                    if self.params.stills.isoforms[
                            minindex].beam_restraint is not None:
                        from scitbx import matrix
                        refined_beam = matrix.col(
                            R.get_experiments()[0].detector[0]
                            .get_beam_centre_lab(
                                experiments[0].beam.get_s0())[0:2])
                        known_beam = matrix.col(
                            self.params.stills.isoforms[minindex].
                            beam_restraint)
                        logger.info(
                            "Asserting difference in refined beam center and expected beam center %f < %f"
                            %
                            ((refined_beam - known_beam).length(), self.params.
                             stills.isoforms[minindex].rmsd_target_mm))
                        assert (refined_beam - known_beam
                                ).length() < self.params.stills.isoforms[
                                    minindex].rmsd_target_mm
                        # future--circle of confusion could be given as a separate length in mm instead of reusing rmsd_target

                    experiment = R.get_experiments()[0]
                    experiment.crystal.identified_isoform = self.params.stills.isoforms[
                        minindex].name

                    isoform_experiments.append(experiment)
                    reflections['id'] = flex.int(len(reflections), expt_id)
                    isoform_reflections.extend(reflections)
                experiments = isoform_experiments
                reflections_for_refinement = isoform_reflections

            try:
                refined_experiments, refined_reflections = self.refine(
                    experiments, reflections_for_refinement)
            except Exception as e:
                s = str(e)
                if len(experiments) == 1:
                    raise Sorry(e)
                had_refinement_error = True
                logger.info("Refinement failed:")
                logger.info(s)
                del experiments[-1]
                break

            # sanity check for unrealistic unit cell volume increase during refinement
            # usually this indicates too many parameters are being refined given the
            # number of observations provided.
            if not self.params.refinement_protocol.disable_unit_cell_volume_sanity_check:
                for orig_expt, refined_expt in zip(experiments,
                                                   refined_experiments):
                    uc1 = orig_expt.crystal.get_unit_cell()
                    uc2 = refined_expt.crystal.get_unit_cell()
                    volume_change = abs(uc1.volume() -
                                        uc2.volume()) / uc1.volume()
                    cutoff = 0.5
                    if volume_change > cutoff:
                        msg = "\n".join((
                            "Unrealistic unit cell volume increase during refinement of %.1f%%.",
                            "Please try refining fewer parameters, either by enforcing symmetry",
                            "constraints (space_group=) and/or disabling experimental geometry",
                            "refinement (detector.fix=all and beam.fix=all). To disable this",
                            "sanity check set disable_unit_cell_volume_sanity_check=True."
                        )) % (100 * volume_change)
                        raise Sorry(msg)

            self.refined_reflections = refined_reflections.select(
                refined_reflections['id'] > -1)

            for i, imageset in enumerate(self.imagesets):
                ref_sel = self.refined_reflections.select(
                    self.refined_reflections['imageset_id'] == i)
                ref_sel = ref_sel.select(ref_sel['id'] >= 0)
                for i_expt in set(ref_sel['id']):
                    expt = refined_experiments[i_expt]
                    imageset.set_detector(expt.detector)
                    imageset.set_beam(expt.beam)
                    imageset.set_goniometer(expt.goniometer)
                    imageset.set_scan(expt.scan)
                    expt.imageset = imageset

            if not (self.all_params.refinement.parameterisation.beam.fix
                    == 'all' and
                    self.all_params.refinement.parameterisation.detector.fix
                    == 'all'):
                # Experimental geometry may have changed - re-map centroids to
                # reciprocal space

                spots_mm = self.reflections
                self.reflections = flex.reflection_table()
                for i, imageset in enumerate(self.imagesets):
                    spots_sel = spots_mm.select(spots_mm['imageset_id'] == i)
                    self.map_centroids_to_reciprocal_space(
                        spots_sel, imageset.get_detector(),
                        imageset.get_beam(), imageset.get_goniometer())
                    self.reflections.extend(spots_sel)

            # update for next cycle
            experiments = refined_experiments
            self.refined_experiments = refined_experiments

        if 'refined_experiments' not in locals():
            raise Sorry("None of the experiments could refine.")

        # discard experiments with zero reflections after refinement
        id_set = set(self.refined_reflections['id'])
        if len(id_set) < len(self.refined_experiments):
            filtered_refined_reflections = flex.reflection_table()
            # iterate in reverse so deletions don't shift the indices of
            # experiments we have yet to visit
            for i in reversed(range(len(self.refined_experiments))):
                if i not in id_set:
                    del self.refined_experiments[i]
            for old, new in zip(sorted(id_set), range(len(id_set))):
                subset = self.refined_reflections.select(
                    self.refined_reflections['id'] == old)
                subset['id'] = flex.int(len(subset), new)
                filtered_refined_reflections.extend(subset)
            self.refined_reflections = filtered_refined_reflections

        if len(self.refined_experiments) > 1:
            from dials.algorithms.indexing.compare_orientation_matrices \
                 import show_rotation_matrix_differences
            show_rotation_matrix_differences(
                self.refined_experiments.crystals(), out=info_handle)

        logger.info("Final refined crystal models:")
        for i, crystal_model in enumerate(self.refined_experiments.crystals()):
            n_indexed = 0
            for i_expt in experiments.where(crystal=crystal_model):
                n_indexed += (self.reflections['id'] == i).count(True)
            logger.info("model %i (%i reflections):" % (i + 1, n_indexed))
            logger.info(crystal_model)

        if 'xyzcal.mm' in self.refined_reflections:  # won't be there if refine_all_candidates = False and no isoforms
            self.refined_reflections['xyzcal.px'] = flex.vec3_double(
                len(self.refined_reflections))
            for i, imageset in enumerate(self.imagesets):
                imgset_sel = self.refined_reflections['imageset_id'] == i
                # set xyzcal.px field in self.refined_reflections
                refined_reflections = self.refined_reflections.select(
                    imgset_sel)
                panel_numbers = flex.size_t(refined_reflections['panel'])
                xyzcal_mm = refined_reflections['xyzcal.mm']
                x_mm, y_mm, z_rad = xyzcal_mm.parts()
                xy_cal_mm = flex.vec2_double(x_mm, y_mm)
                xy_cal_px = flex.vec2_double(len(xy_cal_mm))
                for i_panel in range(len(imageset.get_detector())):
                    panel = imageset.get_detector()[i_panel]
                    sel = (panel_numbers == i_panel)
                    isel = sel.iselection()
                    ref_panel = refined_reflections.select(
                        panel_numbers == i_panel)
                    xy_cal_px.set_selected(
                        sel, panel.millimeter_to_pixel(xy_cal_mm.select(sel)))
                x_px, y_px = xy_cal_px.parts()
                scan = imageset.get_scan()
                if scan is not None:
                    z_px = scan.get_array_index_from_angle(z_rad, deg=False)
                else:
                    # must be a still image, z centroid not meaningful
                    z_px = z_rad
                xyzcal_px = flex.vec3_double(x_px, y_px, z_px)
                self.refined_reflections['xyzcal.px'].set_selected(
                    imgset_sel, xyzcal_px)
Example #10
def index(experiments, reflections, params):
    """
    Index the input experiments and reflections.

    Args:
        experiments: The experiments to index
        reflections (list): A list of reflection tables containing strong spots
        params: An instance of the indexing phil scope

    Returns:
        (tuple): tuple containing:
            experiments: The indexed experiment list
            reflections (dials.array_family.flex.reflection_table):
                The indexed reflections

    """
    if experiments.crystals()[0] is not None:
        known_crystal_models = experiments.crystals()
    else:
        known_crystal_models = None

    if len(reflections) == 0:
        raise ValueError("No reflection lists found in input")
    elif len(reflections) == 1:
        reflections[0]["imageset_id"] = reflections[0]["id"]
    elif len(reflections) > 1:
        assert len(reflections) == len(experiments)
        for i in range(len(reflections)):
            reflections[i]["imageset_id"] = flex.int(len(reflections[i]), i)
            if i > 0:
                reflections[0].extend(reflections[i])
    reflections = reflections[0]

    # If there are scan and goniometer objects present but the oscillation angle is zero
    # then set expt.scan and expt.goniometer to None, as the behaviour of some downstream
    # algorithms depends on the presence/absence of these objects
    for expt in experiments:
        if (expt.goniometer is not None and expt.scan is not None
                and expt.scan.is_still()):
            expt.imageset = ImageSetFactory.imageset_from_anyset(expt.imageset)
            expt.goniometer = None
            expt.scan = None

    if params.indexing.image_range:
        reflections = slice_reflections(reflections,
                                        params.indexing.image_range)

    if len(experiments) == 1 or params.indexing.joint_indexing:
        indexed_experiments, indexed_reflections = _index_experiments(
            experiments,
            reflections,
            copy.deepcopy(params),
            known_crystal_models=known_crystal_models,
        )
    else:
        indexed_experiments = ExperimentList()
        indexed_reflections = flex.reflection_table()

        with concurrent.futures.ProcessPoolExecutor(
                max_workers=params.indexing.nproc) as pool:
            futures = []
            for i_expt, expt in enumerate(experiments):
                refl = reflections.select(reflections["imageset_id"] == i_expt)
                refl["imageset_id"] = flex.size_t(len(refl), 0)
                futures.append(
                    pool.submit(
                        _index_experiments,
                        ExperimentList([expt]),
                        refl,
                        copy.deepcopy(params),
                        known_crystal_models=known_crystal_models,
                    ))

            for future in concurrent.futures.as_completed(futures):
                try:
                    idx_expts, idx_refl = future.result()
                except Exception as e:
                    print(e)
                else:
                    if idx_expts is None:
                        continue
                    for j_expt, _ in enumerate(idx_expts):
                        sel = idx_refl["id"] == j_expt
                        idx_refl["id"].set_selected(
                            sel,
                            len(indexed_experiments) + j_expt)
                    idx_refl["imageset_id"] = flex.size_t(
                        len(idx_refl), i_expt)
                    indexed_reflections.extend(idx_refl)
                    indexed_experiments.extend(idx_expts)
    return indexed_experiments, indexed_reflections
Example #11
def index(experiments, reflections, params):
    """
    Index the input experiments and reflections.

    Args:
        experiments: The experiments to index
        reflections (list): A list of reflection tables containing strong spots
        params: An instance of the indexing phil scope

    Returns:
        (tuple): tuple containing:
            experiments: The indexed experiment list
            reflections (dials.array_family.flex.reflection_table):
                The indexed reflections

    Raises:
        ValueError: `reflections` is an empty list or `experiments` contains a
                    combination of sequence and stills data.
        dials.algorithms.indexing.DialsIndexError: Indexing failed.
    """
    if experiments.crystals()[0] is not None:
        known_crystal_models = experiments.crystals()
    else:
        known_crystal_models = None

    if len(reflections) == 0:
        raise ValueError("No reflection lists found in input")
    elif len(reflections) == 1:
        if "imageset_id" not in reflections[0]:
            reflections[0]["imageset_id"] = reflections[0]["id"]
    elif len(reflections) > 1:
        assert len(reflections) == len(experiments)
        for i in range(len(reflections)):
            reflections[i]["imageset_id"] = flex.int(len(reflections[i]), i)
            if i > 0:
                reflections[0].extend(reflections[i])
    reflections = reflections[0]

    if params.indexing.image_range:
        reflections = slice_reflections(reflections, params.indexing.image_range)

    if len(experiments) == 1 or params.indexing.joint_indexing:
        indexed_experiments, indexed_reflections = _index_experiments(
            experiments,
            reflections,
            copy.deepcopy(params),
            known_crystal_models=known_crystal_models,
        )
    else:
        indexed_experiments = ExperimentList()
        indexed_reflections = flex.reflection_table()

        with concurrent.futures.ProcessPoolExecutor(
            max_workers=params.indexing.nproc
        ) as pool:
            futures = []
            for i_expt, expt in enumerate(experiments):
                refl = reflections.select(reflections["imageset_id"] == i_expt)
                refl["imageset_id"] = flex.size_t(len(refl), 0)
                futures.append(
                    pool.submit(
                        _index_experiments,
                        ExperimentList([expt]),
                        refl,
                        copy.deepcopy(params),
                        known_crystal_models=known_crystal_models,
                    )
                )
            tables_list = []
            for future in concurrent.futures.as_completed(futures):
                try:
                    idx_expts, idx_refl = future.result()
                except Exception as e:
                    print(e)
                else:
                    if idx_expts is None:
                        continue
                    # Update the experiment ids by incrementing by the number of indexed
                    # experiments already in the list
                    ##FIXME below, is i_expt correct - or should it be the
                    # index of the 'future'?
                    idx_refl["imageset_id"] = flex.size_t(idx_refl.size(), i_expt)
                    tables_list.append(idx_refl)
                    indexed_experiments.extend(idx_expts)
            tables_list = renumber_table_id_columns(tables_list)
            for table in tables_list:
                indexed_reflections.extend(table)
    return indexed_experiments, indexed_reflections
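Example #11 delegates the id bookkeeping to renumber_table_id_columns(), which rewrites each table's id column into disjoint consecutive ranges before the tables are concatenated. The sketch below is only an illustrative stand-in for that idea using plain dicts; it is not the DIALS implementation, which also keeps the experiment-identifier maps in sync:

# Illustrative stand-in for renumber_table_id_columns(); not the DIALS
# implementation. Each table is a list of rows, each row a dict with "id".
def renumber_ids(tables):
    next_id = 0
    out = []
    for table in tables:
        mapping = {}
        for old in sorted({row["id"] for row in table}):
            mapping[old] = next_id
            next_id += 1
        out.append([dict(row, id=mapping[row["id"]]) for row in table])
    return out


tables = [[{"id": 0}, {"id": 1}], [{"id": 0}]]
renumbered = renumber_ids(tables)
assert [row["id"] for t in renumbered for row in t] == [0, 1, 2]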
Example #12
#mad_index_params.indexing.refinement_protocol.mode = "repredict_only"
mad_index_params.indexing.refinement_protocol.mode = "ignore"

EXP_LIST = ExperimentList()
shot_indices = [0, 1, 4, 8, 17, 19]  # shots to search for reflections
expected_Nref = [0, 3, 85, 14, 172, 189]  # expected number of reflections

for idx in shot_indices:
    iset = IMGSET[idx:idx + 1]
    iset.set_detector(DETECTOR)
    iset.set_beam(BEAM)

    sub_EXP_LIST = ExperimentListFactory.from_stills_and_crystal(
        iset, crystal=None, load_models=True)

    EXP_LIST.extend(sub_EXP_LIST)

REFLS = []
CRYSTALS = []
RMSD = []

RMSD_MAXs = []


def tst_find_spots():

    global REFLS
    for i in range(len(EXP_LIST)):
        refls_strong = flex.reflection_table.from_observations(
            EXP_LIST[i:i + 1], spot_par)
        Nrefls = len(refls_strong)
Example #13
class Processor(object):
    def __init__(self, params, composite_tag=None):
        self.params = params
        self.composite_tag = composite_tag

        # The convention is to put %s in the phil parameter to add a tag to
        # each output datafile. Save the initial templates here.
        self.datablock_filename_template = params.output.datablock_filename
        self.strong_filename_template = params.output.strong_filename
        self.indexed_filename_template = params.output.indexed_filename
        self.refined_experiments_filename_template = params.output.refined_experiments_filename
        self.integrated_filename_template = params.output.integrated_filename
        self.integrated_experiments_filename_template = params.output.integrated_experiments_filename

        if params.output.composite_output:
            assert composite_tag is not None
            from dxtbx.model.experiment_list import ExperimentList
            from dials.array_family import flex
            #self.all_strong_reflections = flex.reflection_table() # no composite strong pickles yet
            self.all_indexed_experiments = ExperimentList()
            self.all_indexed_reflections = flex.reflection_table()
            self.all_integrated_experiments = ExperimentList()
            self.all_integrated_reflections = flex.reflection_table()
            self.all_int_pickle_filenames = []
            self.all_int_pickles = []

            self.setup_filenames(composite_tag)

    def setup_filenames(self, tag):
        # before processing, set output paths according to the templates
        if self.datablock_filename_template is not None and "%s" in self.datablock_filename_template:
            self.params.output.datablock_filename = os.path.join(
                self.params.output.output_dir,
                self.datablock_filename_template % ("idx-" + tag))
        if self.strong_filename_template is not None and "%s" in self.strong_filename_template:
            self.params.output.strong_filename = os.path.join(
                self.params.output.output_dir,
                self.strong_filename_template % ("idx-" + tag))
        if self.indexed_filename_template is not None and "%s" in self.indexed_filename_template:
            self.params.output.indexed_filename = os.path.join(
                self.params.output.output_dir,
                self.indexed_filename_template % ("idx-" + tag))
        if self.refined_experiments_filename_template is not None and "%s" in self.refined_experiments_filename_template:
            self.params.output.refined_experiments_filename = os.path.join(
                self.params.output.output_dir,
                self.refined_experiments_filename_template % ("idx-" + tag))
        if self.integrated_filename_template is not None and "%s" in self.integrated_filename_template:
            self.params.output.integrated_filename = os.path.join(
                self.params.output.output_dir,
                self.integrated_filename_template % ("idx-" + tag))
        if self.integrated_experiments_filename_template is not None and "%s" in self.integrated_experiments_filename_template:
            self.params.output.integrated_experiments_filename = os.path.join(
                self.params.output.output_dir,
                self.integrated_experiments_filename_template % ("idx-" + tag))

    def process_datablock(self, tag, datablock):
        import os

        if not self.params.output.composite_output:
            self.setup_filenames(tag)
        self.tag = tag

        if self.params.output.datablock_filename:
            from dxtbx.datablock import DataBlockDumper
            dump = DataBlockDumper(datablock)
            dump.as_json(self.params.output.datablock_filename)

        # Do the processing
        try:
            self.pre_process(datablock)
        except Exception as e:
            print("Error in pre-process", tag, str(e))
            if not self.params.dispatch.squash_errors: raise
            return
        try:
            if self.params.dispatch.find_spots:
                observed = self.find_spots(datablock)
            else:
                print("Spot Finding turned off. Exiting")
                return
        except Exception as e:
            print("Error spotfinding", tag, str(e))
            if not self.params.dispatch.squash_errors: raise
            return
        try:
            if self.params.dispatch.index:
                experiments, indexed = self.index(datablock, observed)
            else:
                print("Indexing turned off. Exiting")
                return
        except Exception as e:
            print("Couldn't index", tag, str(e))
            if not self.params.dispatch.squash_errors: raise
            return
        try:
            experiments, indexed = self.refine(experiments, indexed)
        except Exception as e:
            print("Error refining", tag, str(e))
            if not self.params.dispatch.squash_errors: raise
            return
        try:
            if self.params.dispatch.integrate:
                integrated = self.integrate(experiments, indexed)
            else:
                print("Integration turned off. Exiting")
                return
        except Exception as e:
            print("Error integrating", tag, str(e))
            if not self.params.dispatch.squash_errors: raise
            return

    def pre_process(self, datablock):
        """ Add any pre-processing steps here """
        pass

    def find_spots(self, datablock):
        from time import time
        from dials.array_family import flex
        st = time()

        logger.info('*' * 80)
        logger.info('Finding Strong Spots')
        logger.info('*' * 80)

        # Find the strong spots
        observed = flex.reflection_table.from_observations(
            datablock, self.params)

        # Reset z coordinates for dials.image_viewer; see Issue #226 for details
        xyzobs = observed['xyzobs.px.value']
        for i in range(len(xyzobs)):
            xyzobs[i] = (xyzobs[i][0], xyzobs[i][1], 0)
        bbox = observed['bbox']
        for i in range(len(bbox)):
            bbox[i] = (bbox[i][0], bbox[i][1], bbox[i][2], bbox[i][3], 0, 1)

        if self.params.output.composite_output:
            pass  # no composite strong pickles yet
        else:
            # Save the reflections to file
            logger.info('\n' + '-' * 80)
            if self.params.output.strong_filename:
                self.save_reflections(observed,
                                      self.params.output.strong_filename)

        logger.info('')
        logger.info('Time Taken = %f seconds' % (time() - st))
        return observed

    def index(self, datablock, reflections):
        from dials.algorithms.indexing.indexer import indexer_base
        from time import time
        import copy
        st = time()

        logger.info('*' * 80)
        logger.info('Indexing Strong Spots')
        logger.info('*' * 80)

        imagesets = datablock.extract_imagesets()

        params = copy.deepcopy(self.params)
        # don't do scan-varying refinement during indexing
        params.refinement.parameterisation.scan_varying = False

        if hasattr(self, 'known_crystal_models'):
            known_crystal_models = self.known_crystal_models
        else:
            known_crystal_models = None

        if params.indexing.stills.method_list is None:
            idxr = indexer_base.from_parameters(
                reflections,
                imagesets,
                known_crystal_models=known_crystal_models,
                params=params)
            idxr.index()
        else:
            indexing_error = None
            for method in params.indexing.stills.method_list:
                params.indexing.method = method
                try:
                    idxr = indexer_base.from_parameters(reflections,
                                                        imagesets,
                                                        params=params)
                    idxr.index()
                except Exception as e:
                    logger.info("Couldn't index using method %s" % method)
                    if indexing_error is None:
                        if e is None:
                            e = Exception("Couldn't index using method %s" %
                                          method)
                        indexing_error = e
                else:
                    indexing_error = None
                    break
            if indexing_error is not None:
                raise indexing_error

        indexed = idxr.refined_reflections
        experiments = idxr.refined_experiments

        if known_crystal_models is not None:
            from dials.array_family import flex
            filtered = flex.reflection_table()
            for idx in set(indexed['miller_index']):
                sel = indexed['miller_index'] == idx
                if sel.count(True) == 1:
                    filtered.extend(indexed.select(sel))
            logger.info(
                "Filtered duplicate reflections, %d out of %d remaining" %
                (len(filtered), len(indexed)))
            print("Filtered duplicate reflections, %d out of %d remaining" %
                  (len(filtered), len(indexed)))
            indexed = filtered

        logger.info('')
        logger.info('Time Taken = %f seconds' % (time() - st))
        return experiments, indexed
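
    # Editor's sketch (hypothetical helper, not from the original source):
    # the method_list branch above is a "try each strategy in order" pattern --
    # keep the first success, and if every method fails re-raise the first
    # error. In isolation:
    #
    #   def first_successful(strategies):
    #       first_error = None
    #       for strategy in strategies:
    #           try:
    #               return strategy()
    #           except Exception as e:
    #               if first_error is None:
    #                   first_error = e
    #       raise first_error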

    def refine(self, experiments, centroids):
        if self.params.dispatch.refine:
            from dials.algorithms.refinement import RefinerFactory
            from time import time
            st = time()

            logger.info('*' * 80)
            logger.info('Refining Model')
            logger.info('*' * 80)

            refiner = RefinerFactory.from_parameters_data_experiments(
                self.params, centroids, experiments)

            refiner.run()
            experiments = refiner.get_experiments()
            predicted = refiner.predict_for_indexed()
            centroids['xyzcal.mm'] = predicted['xyzcal.mm']
            centroids['entering'] = predicted['entering']
            centroids = centroids.select(
                refiner.selection_used_for_refinement())

            # Re-estimate mosaic estimates
            from dials.algorithms.indexing.nave_parameters import nave_parameters
            nv = nave_parameters(params=self.params,
                                 experiments=experiments,
                                 reflections=centroids,
                                 refinery=refiner,
                                 graph_verbose=False)
            nv()
            acceptance_flags_nv = nv.nv_acceptance_flags
            centroids = centroids.select(acceptance_flags_nv)

        if self.params.output.composite_output:
            if self.params.output.refined_experiments_filename or self.params.output.indexed_filename:
                assert self.params.output.refined_experiments_filename is not None and self.params.output.indexed_filename is not None
                from dials.array_family import flex
                n = len(self.all_indexed_experiments)
                self.all_indexed_experiments.extend(experiments)
                for i, experiment in enumerate(experiments):
                    refls = centroids.select(centroids['id'] == i)
                    refls['id'] = flex.int(len(refls), n)
                    self.all_indexed_reflections.extend(refls)
                    n += 1
        else:
            # Dump experiments to disk
            if self.params.output.refined_experiments_filename:
                from dxtbx.model.experiment_list import ExperimentListDumper
                dump = ExperimentListDumper(experiments)
                dump.as_json(self.params.output.refined_experiments_filename)

            if self.params.output.indexed_filename:
                self.save_reflections(centroids,
                                      self.params.output.indexed_filename)

        if self.params.dispatch.refine:
            logger.info('')
            logger.info('Time Taken = %f seconds' % (time() - st))

        return experiments, centroids
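
    # Editor's note: the composite-output branch above renumbers each
    # experiment's reflections so that many images can share one table --
    # local id i becomes n + i, where n experiments were already accumulated.
    # A plain sketch of that bookkeeping (names hypothetical):
    #
    #   def renumbered_ids(n_existing, reflections_per_experiment):
    #       ids = []
    #       for local_id, n_refls in enumerate(reflections_per_experiment):
    #           ids.extend([n_existing + local_id] * n_refls)
    #       return ids
    #
    #   # renumbered_ids(2, [3, 1]) -> [2, 2, 2, 3]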

    def integrate(self, experiments, indexed):
        from time import time

        st = time()

        logger.info('*' * 80)
        logger.info('Integrating Reflections')
        logger.info('*' * 80)

        indexed, _ = self.process_reference(indexed)

        # Get the integrator from the input parameters
        logger.info('Configuring integrator from input parameters')
        from dials.algorithms.profile_model.factory import ProfileModelFactory
        from dials.algorithms.integration.integrator import IntegratorFactory
        from dials.array_family import flex

        # Compute the profile model
        # Predict the reflections
        # Match the predictions with the reference
        # Create the integrator
        experiments = ProfileModelFactory.create(self.params, experiments,
                                                 indexed)
        logger.info("")
        logger.info("=" * 80)
        logger.info("")
        logger.info("Predicting reflections")
        logger.info("")
        predicted = flex.reflection_table.from_predictions_multi(
            experiments,
            dmin=self.params.prediction.d_min,
            dmax=self.params.prediction.d_max,
            margin=self.params.prediction.margin,
            force_static=self.params.prediction.force_static)
        predicted.match_with_reference(indexed)
        logger.info("")
        integrator = IntegratorFactory.create(self.params, experiments,
                                              predicted)

        # Integrate the reflections
        integrated = integrator.integrate()

        # correct integrated intensities for absorption correction, if necessary
        for abs_params in self.params.integration.absorption_correction:
            if abs_params.apply and abs_params.algorithm == "fuller_kapton":
                from dials.algorithms.integration.kapton_correction import multi_kapton_correction
                experiments, integrated = multi_kapton_correction(
                    experiments,
                    integrated,
                    abs_params.fuller_kapton,
                    logger=logger)()

        if self.params.significance_filter.enable:
            from dials.algorithms.integration.stills_significance_filter import SignificanceFilter
            sig_filter = SignificanceFilter(self.params)
            refls = sig_filter(experiments, integrated)
            logger.info(
                "Removed %d reflections out of %d when applying significance filter"
                % (len(integrated) - len(refls), len(integrated)))
            if len(refls) == 0:
                raise Sorry(
                    "No reflections left after applying significance filter")
            integrated = refls

        # Delete the shoeboxes used for intermediate calculations, if requested
        if self.params.integration.debug.delete_shoeboxes and 'shoebox' in integrated:
            del integrated['shoebox']

        if self.params.output.composite_output:
            if self.params.output.integrated_experiments_filename or self.params.output.integrated_filename:
                assert self.params.output.integrated_experiments_filename is not None and self.params.output.integrated_filename is not None
                from dials.array_family import flex
                n = len(self.all_integrated_experiments)
                self.all_integrated_experiments.extend(experiments)
                for i, experiment in enumerate(experiments):
                    refls = integrated.select(integrated['id'] == i)
                    refls['id'] = flex.int(len(refls), n)
                    self.all_integrated_reflections.extend(refls)
                    n += 1
        else:
            # Dump experiments to disk
            if self.params.output.integrated_experiments_filename:
                from dxtbx.model.experiment_list import ExperimentListDumper
                dump = ExperimentListDumper(experiments)
                dump.as_json(
                    self.params.output.integrated_experiments_filename)

            if self.params.output.integrated_filename:
                # Save the reflections
                self.save_reflections(integrated,
                                      self.params.output.integrated_filename)

        self.write_integration_pickles(integrated, experiments)
        from dials.algorithms.indexing.stills_indexer import calc_2D_rmsd_and_displacements

        rmsd_indexed, _ = calc_2D_rmsd_and_displacements(indexed)
        log_str = "RMSD indexed (px): %f\n" % (rmsd_indexed)
        for i in range(6):
            bright_integrated = integrated.select(
                (integrated['intensity.sum.value'] /
                 flex.sqrt(integrated['intensity.sum.variance'])) >= i)
            if len(bright_integrated) > 0:
                rmsd_integrated, _ = calc_2D_rmsd_and_displacements(
                    bright_integrated)
            else:
                rmsd_integrated = 0
            log_str += "N reflections integrated at I/sigI >= %d: % 4d, RMSD (px): %f\n" % (
                i, len(bright_integrated), rmsd_integrated)

        for crystal_model in experiments.crystals():
            if hasattr(crystal_model, 'get_domain_size_ang'):
                log_str += ". Final ML model: domain size angstroms: %f, half mosaicity degrees: %f" % (
                    crystal_model.get_domain_size_ang(),
                    crystal_model.get_half_mosaicity_deg())

        logger.info(log_str)

        logger.info('')
        logger.info('Time Taken = %f seconds' % (time() - st))
        return integrated
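
    # Editor's sketch (names hypothetical, not from the original source):
    # the reporting loop above thresholds reflections at increasing
    # signal-to-noise cutoffs, with I/sigI computed as
    # intensity.sum.value / sqrt(intensity.sum.variance). In plain Python:
    #
    #   import math
    #
    #   def strong_subset(intensities, variances, cutoff):
    #       return [I for I, v in zip(intensities, variances)
    #               if v > 0 and I / math.sqrt(v) >= cutoff]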

    def write_integration_pickles(self,
                                  integrated,
                                  experiments,
                                  callback=None):
        """
    Write a serialized python dictionary with integrated intensities and other information
    suitible for use by cxi.merge or prime.postrefine.
    @param integrated Reflection table with integrated intensities
    @param experiments Experiment list. One integration pickle for each experiment will be created.
    @param callback Deriving classes can use callback to make further modifications to the dictionary
    before it is serialized. Callback should be a function with this signature:
    def functionname(params, outfile, frame), where params is the phil scope, outfile is the path
    to the pickle that will be saved, and frame is the python dictionary to be serialized.
    """
        try:
            picklefilename = self.params.output.integration_pickle
        except AttributeError:
            return

        if self.params.output.integration_pickle is not None:

            from libtbx import easy_pickle
            import os
            from xfel.command_line.frame_extractor import ConstructFrame
            from dials.array_family import flex

            # Split everything into separate experiments for pickling
            for e_number in range(len(experiments)):
                experiment = experiments[e_number]
                e_selection = integrated['id'] == e_number
                reflections = integrated.select(e_selection)

                frame = ConstructFrame(reflections, experiment).make_frame()
                frame["pixel_size"] = experiment.detector[0].get_pixel_size(
                )[0]

                if not hasattr(self, 'tag') or self.tag is None:
                    try:
                        # if the data was a file on disc, get the path
                        event_timestamp = os.path.splitext(
                            experiments[0].imageset.paths()[0])[0]
                    except NotImplementedError:
                        # if the data is in memory only, check if the reader set a timestamp on the format object
                        event_timestamp = experiment.imageset.reader(
                        ).get_format(0).timestamp
                    event_timestamp = os.path.basename(event_timestamp)
                    if event_timestamp.find("shot-") == 0:
                        event_timestamp = os.path.splitext(event_timestamp)[
                            0]  # micromanage the file name
                else:
                    event_timestamp = self.tag
                if hasattr(self.params.output, "output_dir"):
                    outfile = os.path.join(
                        self.params.output.output_dir,
                        self.params.output.integration_pickle %
                        (e_number, event_timestamp))
                else:
                    outfile = os.path.join(
                        os.path.dirname(self.params.output.integration_pickle),
                        self.params.output.integration_pickle %
                        (e_number, event_timestamp))

                if callback is not None:
                    callback(self.params, outfile, frame)

                if self.params.output.composite_output:
                    self.all_int_pickle_filenames.append(
                        os.path.basename(outfile))
                    self.all_int_pickles.append(frame)
                else:
                    easy_pickle.dump(outfile, frame)
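
    # Editor's sketch of a callback matching the signature documented in the
    # docstring above (hypothetical example, not part of the original source):
    #
    #   def tag_frame(params, outfile, frame):
    #       # called just before `frame` is pickled to `outfile`
    #       frame["note"] = "postprocessed"
    #
    #   # self.write_integration_pickles(integrated, experiments,
    #   #                                callback=tag_frame)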

    def process_reference(self, reference):
        ''' Load the reference spots. '''
        from dials.array_family import flex
        from time import time
        if reference is None:
            return None, None
        st = time()
        assert ("miller_index" in reference)
        assert ("id" in reference)
        logger.info('Processing reference reflections')
        logger.info(' read %d strong spots' % len(reference))
        mask = reference.get_flags(reference.flags.indexed)
        rubbish = reference.select(~mask)
        if mask.count(False) > 0:
            reference.del_selected(~mask)
            logger.info(' removing %d unindexed reflections' %
                        mask.count(False))
        if len(reference) == 0:
            raise Sorry('''
        Invalid input for reference reflections.
        Expected > %d indexed spots, got %d
      ''' % (0, len(reference)))
        mask = reference['miller_index'] == (0, 0, 0)
        if mask.count(True) > 0:
            rubbish.extend(reference.select(mask))
            reference.del_selected(mask)
            logger.info(' removing %d reflections with hkl (0,0,0)' %
                        mask.count(True))
        mask = reference['id'] < 0
        if mask.count(True) > 0:
            raise Sorry('''
        Invalid input for reference reflections.
        %d reference spots have an invalid experiment id
      ''' % mask.count(True))
        logger.info(' using %d indexed reflections' % len(reference))
        logger.info(' found %d junk reflections' % len(rubbish))
        logger.info(' time taken: %g' % (time() - st))
        return reference, rubbish

    def save_reflections(self, reflections, filename):
        ''' Save the reflections to file. '''
        from time import time
        st = time()
        logger.info('Saving %d reflections to %s' %
                    (len(reflections), filename))
        reflections.as_pickle(filename)
        logger.info(' time taken: %g' % (time() - st))

    def finalize(self):
        ''' Perform any final operations '''
        if self.params.output.composite_output:
            # Dump composite files to disk
            if len(self.all_indexed_experiments
                   ) > 0 and self.params.output.refined_experiments_filename:
                from dxtbx.model.experiment_list import ExperimentListDumper
                dump = ExperimentListDumper(self.all_indexed_experiments)
                dump.as_json(self.params.output.refined_experiments_filename)

            if len(self.all_indexed_reflections
                   ) > 0 and self.params.output.indexed_filename:
                self.save_reflections(self.all_indexed_reflections,
                                      self.params.output.indexed_filename)

            if len(
                    self.all_integrated_experiments
            ) > 0 and self.params.output.integrated_experiments_filename:
                from dxtbx.model.experiment_list import ExperimentListDumper
                dump = ExperimentListDumper(self.all_integrated_experiments)
                dump.as_json(
                    self.params.output.integrated_experiments_filename)

            if len(self.all_integrated_reflections
                   ) > 0 and self.params.output.integrated_filename:
                self.save_reflections(self.all_integrated_reflections,
                                      self.params.output.integrated_filename)

            # Create a tar archive of the integration dictionary pickles
            if len(self.all_int_pickles
                   ) > 0 and self.params.output.integration_pickle:
                import tarfile, time
                import pickle
                from io import BytesIO
                tar_template_integration_pickle = self.params.output.integration_pickle.replace(
                    '%d', '%s')
                outfile = os.path.join(
                    self.params.output.output_dir,
                    tar_template_integration_pickle %
                    ('x', self.composite_tag)) + ".tar"
                tar = tarfile.TarFile(outfile, "w")
                for fname, d in zip(self.all_int_pickle_filenames,
                                    self.all_int_pickles):
                    # serialize once so the tar header can record the size
                    data = pickle.dumps(d, protocol=2)
                    info = tarfile.TarInfo(name=fname)
                    info.size = len(data)
                    info.mtime = time.time()
                    tar.addfile(tarinfo=info, fileobj=BytesIO(data))
                tar.close()
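
# Editor's sketch (not part of the original source): reading back the tar
# archive of integration pickles written by finalize() above. Assumes each
# archive member is a protocol-2 pickle of a frame dictionary.
import tarfile
import pickle

def read_integration_pickles(tar_path):
    """Yield (member name, frame dict) pairs from a composite archive."""
    with tarfile.open(tar_path, "r") as tar:
        for member in tar.getmembers():
            fileobj = tar.extractfile(member)
            if fileobj is not None:
                yield member.name, pickle.load(fileobj)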
예제 #14
0
class Index(object):
    def __init__(self, experiments, reflections, params):

        self._params = params

        if experiments.crystals()[0] is not None:
            known_crystal_models = experiments.crystals()
        else:
            known_crystal_models = None

        if len(reflections) == 0:
            raise Sorry("No reflection lists found in input")
        elif len(reflections) == 1:
            reflections[0]["imageset_id"] = reflections[0]["id"]
        elif len(reflections) > 1:
            assert len(reflections) == len(experiments)
            for i in range(len(reflections)):
                reflections[i]["imageset_id"] = flex.int(
                    len(reflections[i]), i)
                if i > 0:
                    reflections[0].extend(reflections[i])
        reflections = reflections[0]

        for expt in experiments:
            if (expt.goniometer is not None and expt.scan is not None
                    and expt.scan.get_oscillation()[1] == 0):
                expt.goniometer = None
                expt.scan = None

        if self._params.indexing.image_range:
            reflections = slice_reflections(reflections,
                                            self._params.indexing.image_range)

        if len(experiments) == 1 or self._params.indexing.joint_indexing:
            try:
                self._indexed_experiments, self._indexed_reflections = index_experiments(
                    experiments,
                    reflections,
                    copy.deepcopy(params),
                    known_crystal_models=known_crystal_models,
                )
            except DialsIndexError as e:
                raise Sorry(str(e))
        else:
            self._indexed_experiments = ExperimentList()
            self._indexed_reflections = flex.reflection_table()

            import concurrent.futures

            with concurrent.futures.ProcessPoolExecutor(
                    max_workers=params.indexing.nproc) as pool:
                # map each future back to its originating experiment index so
                # that the completion loop labels reflections correctly
                # (futures complete in arbitrary order)
                futures = {}
                for i_expt, expt in enumerate(experiments):
                    refl = reflections.select(
                        reflections["imageset_id"] == i_expt)
                    refl["imageset_id"] = flex.size_t(len(refl), 0)
                    futures[pool.submit(
                        index_experiments,
                        ExperimentList([expt]),
                        refl,
                        copy.deepcopy(params),
                        known_crystal_models=known_crystal_models,
                    )] = i_expt

                for future in concurrent.futures.as_completed(futures):
                    i_expt = futures[future]
                    try:
                        idx_expts, idx_refl = future.result()
                    except Exception as e:
                        print(e)
                    else:
                        if idx_expts is None:
                            continue
                        for j_expt, _ in enumerate(idx_expts):
                            sel = idx_refl["id"] == j_expt
                            idx_refl["id"].set_selected(
                                sel,
                                len(self._indexed_experiments) + j_expt)
                        idx_refl["imageset_id"] = flex.size_t(
                            len(idx_refl), i_expt)
                        self._indexed_reflections.extend(idx_refl)
                        self._indexed_experiments.extend(idx_expts)

    def export_experiments(self, filename):
        experiments = self._indexed_experiments
        if self._params.output.split_experiments:
            logger.info("Splitting experiments before output")

            experiments = ExperimentList(
                [copy.deepcopy(expt) for expt in experiments])
        logger.info("Saving refined experiments to %s" % filename)

        assert experiments.is_consistent()
        dump.experiment_list(experiments, filename)

    def export_reflections(self, filename):
        logger.info("Saving refined reflections to %s" % filename)
        self._indexed_reflections.as_msgpack_file(filename=filename)
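
# Editor's sketch (hypothetical, not from the original source): the
# ProcessPoolExecutor pattern used in Index.__init__ in isolation. Mapping
# each future back to its input index keeps results correctly labelled even
# though futures complete in arbitrary order.
import concurrent.futures

def square(x):
    return x * x

def run_labelled(items):
    results = {}
    with concurrent.futures.ProcessPoolExecutor(max_workers=2) as pool:
        futures = {pool.submit(square, item): i for i, item in enumerate(items)}
        for future in concurrent.futures.as_completed(futures):
            results[futures[future]] = future.result()  # index -> result
    return [results[i] for i in range(len(items))]

# run_labelled([1, 2, 3]) -> [1, 4, 9]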
예제 #15
0
    def run(self):
        """Execute the script."""
        from dials.util import log
        from time import time
        from libtbx import easy_mp
        import copy

        # Parse the command line
        params, options, all_paths = self.parser.parse_args(
            show_diff_phil=False, return_unhandled=True, quick_parse=True)

        # Check we have some filenames
        if not all_paths:
            self.parser.print_help()
            return

        # Mask validation
        for mask_path in params.spotfinder.lookup.mask, params.integration.lookup.mask:
            if mask_path is not None and not os.path.isfile(mask_path):
                raise Sorry("Mask %s not found" % mask_path)

        # Save the options
        self.options = options
        self.params = params

        st = time()

        # Configure logging
        #log.config(
        #    params.verbosity, info="exafel_spotfinding.process.log", debug="exafel.spot_finding.debug.log"
        #)

        bad_phils = [f for f in all_paths if os.path.splitext(f)[1] == ".phil"]
        if len(bad_phils) > 0:
            self.parser.print_help()
            logger.error(
                "Error: the following phil files were not understood: %s" %
                (", ".join(bad_phils)))
            return

        # Log the diff phil
        diff_phil = self.parser.diff_phil.as_str()
        if diff_phil != "":
            logger.info("The following parameters have been modified:\n")
            logger.info(diff_phil)

        for abs_params in self.params.integration.absorption_correction:
            if abs_params.apply:
                if not (self.params.integration.debug.output
                        and not self.params.integration.debug.separate_files):
                    raise Sorry(
                        "Shoeboxes must be saved to integration intermediates to apply an absorption correction. "
                        +
                        "Set integration.debug.output=True, integration.debug.separate_files=False and "
                        +
                        "integration.debug.delete_shoeboxes=True to temporarily store shoeboxes."
                    )

        self.load_reference_geometry()
        from dials.command_line.dials_import import ManualGeometryUpdater

        update_geometry = ManualGeometryUpdater(params)

        # Import stuff
        logger.info("Loading files...")
        pre_import = params.dispatch.pre_import or len(all_paths) == 1
        if True:  # pre_import path only; the non-pre-import branch is omitted in this excerpt
            # Handle still imagesets by breaking them apart into multiple experiments
            # Further handle single file still imagesets (like HDF5) by tagging each
            # frame using its index
            experiments = ExperimentList()
            for path in all_paths:
                experiments.extend(do_import(path, load_models=False))

            indices = []
            basenames = []
            split_experiments = []
            for i, imageset in enumerate(experiments.imagesets()):
                assert len(imageset) == 1
                paths = imageset.paths()
                indices.append(i)
                basenames.append(
                    os.path.splitext(os.path.basename(paths[0]))[0])
                split_experiments.append(experiments[i:i + 1])
            tags = []
            for i, basename in zip(indices, basenames):
                if basenames.count(basename) > 1:
                    tags.append("%s_%05d" % (basename, i))
                else:
                    tags.append(basename)

            # Wrapper function
            def do_work(i, item_list):
                processor = SpotFinding_Processor(copy.deepcopy(params),
                                                  composite_tag="%04d" % i,
                                                  rank=i)
                if params.LS49.dump_CBF:
                    print('READING IN TIMESTAMPS TO DUMP')
                    # Read in file with timestamps information
                    processor.timestamps_to_dump = []
                    for fin in glob.glob(
                            os.path.join(
                                self.params.LS49.
                                path_to_rayonix_crystal_models,
                                'idx-fee_data*')):
                        #for fin in glob.glob(os.path.join(self.params.LS49.path_to_rayonix_crystal_models, 'int-0-*')):
                        int_file = os.path.basename(fin)
                        ts = int_file[13:30]
                        processor.timestamps_to_dump.append(ts)
                    #with open(os.path.join(self.params.output.output_dir,'../timestamps_to_dump.dat'), 'r') as fin:
                    #    for line in fin:
                    #        if line !='\n':
                    #            ts = line.split()[0].strip()
                    #            processor.timestamps_to_dump.append(ts)

                from dials.array_family import flex
                all_spots_from_rank = flex.reflection_table()
                for item in item_list:
                    try:
                        assert len(item[1]) == 1
                        experiment = item[1][0]
                        experiment.load_models()
                        imageset = experiment.imageset
                        update_geometry(imageset)
                        experiment.beam = imageset.get_beam()
                        experiment.detector = imageset.get_detector()
                    except RuntimeError as e:
                        logger.warning(
                            "Error updating geometry on item %s, %s" %
                            (str(item[0]), str(e)))
                        continue

                    if self.reference_detector is not None:
                        from dxtbx.model import Detector
                        experiment = item[1][0]
                        imageset = experiment.imageset
                        imageset.set_detector(
                            Detector.from_dict(
                                self.reference_detector.to_dict()))
                        experiment.detector = imageset.get_detector()

                    refl_table = processor.process_experiments(
                        item[0], item[1], item[2])
                    if refl_table is not None:
                        all_spots_from_rank.extend(refl_table)
                processor.finalize()
                return all_spots_from_rank

            iterable = zip(tags, split_experiments, indices)

        # Process the data
        if params.mp.method == 'mpi':
            from mpi4py import MPI
            comm = MPI.COMM_WORLD
            rank = comm.Get_rank()  # each process in MPI has a unique id, 0-indexed
            size = comm.Get_size()  # number of processes running in this job

            # Configure the logging
            if params.output.logging_dir is None:
                info_path = ''
                debug_path = ''
            else:
                import sys
                log_path = os.path.join(params.output.logging_dir,
                                        "log_rank%04d.out" % rank)
                error_path = os.path.join(params.output.logging_dir,
                                          "error_rank%04d.out" % rank)
                print("Redirecting stdout to %s" % log_path)
                print("Redirecting stderr to %s" % error_path)
                sys.stdout = open(log_path, 'a', buffering=1)  # line-buffered; buffering=0 is invalid for text mode in Python 3
                sys.stderr = open(error_path, 'a', buffering=1)
                print("Should be redirected now")

                info_path = os.path.join(params.output.logging_dir,
                                         "info_rank%04d.out" % rank)
                debug_path = os.path.join(params.output.logging_dir,
                                          "debug_rank%04d.out" % rank)

            from dials.util import log
            print('IOTA_ALL_SPOTS_RANKS_0')
            #log.config(params.verbosity, info=info_path, debug=debug_path)
            subset = [
                item for i, item in enumerate(iterable)
                if (i + rank) % size == 0
            ]
            all_spots_from_rank = do_work(rank, subset)
            all_spots_rank0 = comm.gather(all_spots_from_rank, root=0)
            print('IOTA_ALL_SPOTS_RANKS_1')
            #exit()  # debug leftover; if enabled it would skip the rank-0 reduction below
            if rank == 0:
                from dials.array_family import flex
                all_spots = flex.reflection_table()
                for ii, refl_table in enumerate(all_spots_rank0):
                    if refl_table is not None:
                        all_spots.extend(refl_table)
                from libtbx.easy_pickle import dump
                #dump('all_spots.pickle', all_spots_rank0)
                #dump('all_experiments.pickle', experiments)
                #print ('IOTA_ALL_SPOTS_RANKS_2')
                #print ('IOTA_ALL_SPOTS_RANKS_3')
                from dials.algorithms.spot_finding import per_image_analysis
                from six.moves import cStringIO as StringIO
                s = StringIO()
                # Assuming one datablock. Might be dangerous
                # FIXME
                from dxtbx.format.cbf_writer import FullCBFWriter
                for i, imageset in enumerate(experiments.imagesets()):
                    print("Number of centroids per image for imageset %i:" % i,
                          file=s)
                    #from IPython import embed; embed(); exit()
                    print('IOTA_ALL_SPOTS_RANKS_4')
                    stats = custom_stats_imageset(
                        imageset, all_spots.select(all_spots['img_id'] == i))
                    n_spots_total = flex.int(stats.n_spots_total)
                    max_number_of_spots = max(stats.n_spots_total)
                    for num_spots in range(1, max_number_of_spots + 1):
                        print("IOTA_NUMBER_OF_SPOTS %d %d" %
                              (num_spots,
                               len(
                                   n_spots_total.select(
                                       n_spots_total == num_spots))))
                    if max_number_of_spots > 0:
                        # assuming one imageset per experiment here : applicable for stills
                        ts = imageset.get_image_identifier(0)
                        xfel_ts = (ts[0:4] + ts[5:7] + ts[8:10] + ts[11:13] +
                                   ts[14:16] + ts[17:19] + ts[20:23])
                        cbf_path = os.path.join(params.output.logging_dir,
                                                'jungfrau_%s.cbf' % xfel_ts)
                        cbf_writer = FullCBFWriter(imageset=imageset)
                        cbf_writer.write_cbf(cbf_path)
                    per_image_analysis.print_table(stats)
                    logger.info(s.getvalue())
            comm.barrier()
        else:
            do_work(0, iterable)
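
# Editor's sketch (not part of the original source): the round-robin split
# used above, `(i + rank) % size == 0`, hands each MPI rank every size-th
# item with a rank-dependent offset, so all items are covered exactly once.
def round_robin_subset(iterable, rank, size):
    """Return the items a given rank should process."""
    return [item for i, item in enumerate(iterable) if (i + rank) % size == 0]

# round_robin_subset("abcdef", rank=0, size=3) -> ['a', 'd']
# round_robin_subset("abcdef", rank=1, size=3) -> ['c', 'f']
# round_robin_subset("abcdef", rank=2, size=3) -> ['b', 'e']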
예제 #16
0
    def index(self, provided_experiments=None, debug=False):
        ''' This step does:
            1. find_lattices (via a method like fft1d)
            2. assign hkl indices (through index_reflections)
            3. housekeeping: apply_symmetry, discard too-similar models
        '''

        experiments = ExperimentList()
        have_similar_crystal_models = False

        self.d_min = self.params.refinement_protocol.d_min_start

        # Find lattices, i.e. the basis vectors & unit cell parameters.
        # Multiple lattices may be indexed, up to max_lattices.
        while True:
            max_lattices = self.params.multiple_lattice_search.max_lattices
            if max_lattices is not None and len(experiments) >= max_lattices:
                break
            n_lattices_previous_cycle = len(experiments)
            if len(experiments) == 0:
                experiments.extend(self.find_lattices())
            else:
                try:
                    new = self.find_lattices()
                    experiments.extend(new)
                except Sorry:
                    print('Indexing remaining reflections failed')
                    break

            if len(experiments) == 0:
                raise Sorry("No suitable lattice could be found.")

            # Initialize id values as -1 since no indexing has been done yet
            self.reflections['id'] = flex.int(len(self.reflections), -1)

            # Now index reflections
            self.index_reflections(experiments, self.reflections, debug=debug)

            # Housekeeping. Apply symmetry
            self._apply_symmetry_post_indexing(experiments, self.reflections,
                                               n_lattices_previous_cycle)

            # Aug_Refactor: the block below is probably unnecessary under the
            # new style, but it is kept commented out so the IOTA code path
            # can be restored if needed
            '''
      target_space_group = self.target_symmetry_primitive.space_group()
      for i_cryst, cryst in enumerate(experiments.crystals()):
        if i_cryst >= n_lattices_previous_cycle:
          new_cryst, cb_op_to_primitive = self.apply_symmetry(
                                        cryst, target_space_group)
          if provided_experiments is None:
            if self.cb_op_primitive_inp is not None:
              new_cryst = new_cryst.change_basis(self.cb_op_primitive_inp)
              logger.info(new_cryst.get_space_group().info())
            cryst.update(new_cryst)
            cryst.set_space_group(
                self.params.known_symmetry.space_group.group())
          for i_expt, expt in enumerate(experiments):
            if expt.crystal is not cryst:
              continue
            if not cb_op_to_primitive.is_identity_op():
              miller_indices = self.reflections['miller_index'].select(
                  self.reflections['id'] == i_expt)

              if provided_experiments is None:
                miller_indices = cb_op_to_primitive.apply(miller_indices)
              self.reflections['miller_index'].set_selected(
                  self.reflections['id'] == i_expt, miller_indices)

            if self.cb_op_primitive_inp is not None:
              miller_indices = self.reflections['miller_index'].select(
                  self.reflections['id'] == i_expt)

              if provided_experiments is None:
                miller_indices = self.cb_op_primitive_inp.apply(miller_indices)
              self.reflections['miller_index'].set_selected(
                  self.reflections['id'] == i_expt, miller_indices)
              # IOTA
              from scitbx.matrix import sqr
              hklfrac=flex.mat3_double(len(miller_indices), sqr(cryst.get_A()).inverse())*self.reflections['rlp'].select(self.reflections['id']==i_expt)
              self.reflections['fractional_miller_index'].set_selected(self.reflections['id']==i_expt, hklfrac)
    '''

        logger.info("\nIndexed crystal models:")
        self.show_experiments(experiments, self.reflections, d_min=self.d_min)

        # Discard nearly overlapping lattices.
        # difference_rotation_matrix_axis_angle still exists in DIALS 2.0, so nothing below needs changing.
        if len(experiments) > 1:
            from dials.algorithms.indexing.compare_orientation_matrices \
              import difference_rotation_matrix_axis_angle
            cryst_b = experiments.crystals()[-1]
            have_similar_crystal_models = False
            for i_a, cryst_a in enumerate(experiments.crystals()[:-1]):
                R_ab, axis, angle, cb_op_ab = \
                    difference_rotation_matrix_axis_angle(cryst_a, cryst_b)
                min_angle = self.params.multiple_lattice_search.minimum_angular_separation
                if abs(angle) < min_angle:  # degrees
                    logger.info(
                        "Crystal models too similar, rejecting crystal %i:" %
                        (len(experiments)))
                    logger.info(
                        "Rotation matrix to transform crystal %i to crystal %i"
                        % (i_a + 1, len(experiments)))
                    logger.info(R_ab)
                    logger.info("Rotation of %.3f degrees" % angle +
                                " about axis (%.3f, %.3f, %.3f)" % axis)
                    have_similar_crystal_models = True
                    del experiments[-1]
                    break

        self.indexed_reflections = (self.reflections['id'] > -1)
        self.experiments = experiments
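
# Editor's sketch (not from the original source): the similarity test above
# rejects a new crystal model when the rotation relating it to an existing
# one is smaller than minimum_angular_separation. A simplified version of
# that misorientation angle -- ignoring the change-of-basis search that
# difference_rotation_matrix_axis_angle performs -- follows from the trace
# of the relative rotation matrix:
import numpy as np

def misorientation_deg(R_a, R_b):
    """Angle in degrees of the rotation taking orientation R_a to R_b."""
    R_ab = R_b @ R_a.T                       # relative rotation
    cos_angle = (np.trace(R_ab) - 1.0) / 2.0
    return np.degrees(np.arccos(np.clip(cos_angle, -1.0, 1.0)))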