Example #1
def test_filter_reflections(run_in_tmpdir):
    # Make a dummy reflection table for the test setting some values and flags
    rt = flex.reflection_table.empty_standard(6)
    rt["iobs"] = flex.size_t_range(len(rt))
    rt["panel"] = flex.size_t_range(len(rt))
    rt["id"] = flex.size_t([0] * 5 + [1])
    rt["d"] = flex.double([50, 40, 3.0, 2.5, 2.0, 1.0])
    mask1 = flex.bool([True] * 3 + [False] * 3)
    mask2 = flex.bool([True, False] * 3)
    rt.set_flags(mask1, rt.flags.integrated)
    rt.set_flags(mask2, rt.flags.reference_spot)
    rt_name = "test_refs.refl"
    rt.as_file(rt_name)

    # Test flag expression
    cmd = [
        "dials.filter_reflections",
        rt_name,
        "flag_expression='integrated & ~reference_spot'",
    ]
    result = procrunner.run(cmd)
    assert not result.returncode and not result.stderr
    ref = flex.reflection_table.from_file("filtered.refl")
    # The test selects only the 2nd reflection
    assert len(ref) == 1
    assert list(ref["iobs"]) == [1]

    # Test filter by experiment id
    cmd = ["dials.filter_reflections", rt_name, "id=0"]
    result = procrunner.run(cmd)
    assert not result.returncode and not result.stderr
    ref = flex.reflection_table.from_file("filtered.refl")
    # The test selects only the first five reflections
    assert len(ref) == 5
    assert list(ref["iobs"]) == [0, 1, 2, 3, 4]

    # Test filter by panel
    cmd = ["dials.filter_reflections", rt_name, "panel=5"]
    result = procrunner.run(cmd)
    assert not result.returncode and not result.stderr
    ref = flex.reflection_table.from_file("filtered.refl")
    # The test selects only the last reflection
    assert len(ref) == 1
    assert list(ref["iobs"]) == [5]

    # Test filter by resolution
    cmd = ["dials.filter_reflections", rt_name, "d_max=3.0", "d_min=2.0"]
    result = procrunner.run(cmd)
    assert not result.returncode and not result.stderr
    ref = flex.reflection_table.from_file("filtered.refl")
    # The test selects only the 3rd, 4th and 5th reflections
    assert len(ref) == 3
    assert list(ref["iobs"]) == [2, 3, 4]

    # Test printing analysis
    cmd = ["dials.filter_reflections", rt_name]
    result = procrunner.run(cmd)
    assert not result.returncode and not result.stderr
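For reference, the flag_expression case above is equivalent to the following in-memory selection. This is a minimal sketch assuming a DIALS installation, not the implementation of dials.filter_reflections itself:

from dials.array_family import flex

rt = flex.reflection_table.empty_standard(6)
rt["iobs"] = flex.size_t_range(len(rt))
rt.set_flags(flex.bool([True] * 3 + [False] * 3), rt.flags.integrated)
rt.set_flags(flex.bool([True, False] * 3), rt.flags.reference_spot)

# 'integrated & ~reference_spot' is True only for the second row (iobs == 1)
mask = rt.get_flags(rt.flags.integrated) & ~rt.get_flags(rt.flags.reference_spot)
assert list(rt.select(mask)["iobs"]) == [1]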
Example #2
def reflections(tmpdir_factory):
    # Make a dummy reflection table for the test setting some values and flags
    rt = flex.reflection_table.empty_standard(6)
    rt["iobs"] = flex.size_t_range(len(rt))
    rt["panel"] = flex.size_t_range(len(rt))
    rt["id"] = flex.int([0] * 5 + [1])
    rt["d"] = flex.double([50, 40, 3.0, 2.5, 2.0, 1.0])
    mask1 = flex.bool([True] * 3 + [False] * 3)
    mask2 = flex.bool([True, False] * 3)
    rt.set_flags(mask1, rt.flags.integrated)
    rt.set_flags(mask2, rt.flags.reference_spot)
    tmpdir = tmpdir_factory.mktemp("filter_reflections")
    rt_name = tmpdir.join("test_refs.refl")
    rt.as_file(rt_name.strpath)
    return rt_name
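A hypothetical pytest consuming this fixture might look like the sketch below, following the command-line pattern of Example #1 (the test name and the procrunner working_directory usage are assumptions, not part of the original):

def test_filter_by_experiment_id(reflections, tmpdir):
    # run dials.filter_reflections on the fixture's file, writing into tmpdir
    result = procrunner.run(
        ["dials.filter_reflections", reflections.strpath, "id=0"],
        working_directory=tmpdir.strpath,
    )
    assert not result.returncode and not result.stderr
    filtered = flex.reflection_table.from_file(tmpdir.join("filtered.refl").strpath)
    assert list(filtered["iobs"]) == [0, 1, 2, 3, 4]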
Example #3
    def __init__(self, reflections, experiments, params):
        self.reflections = reflections
        self.experiments = experiments

        self.params = params.indexing
        self.all_params = params
        self.refined_experiments = None
        self.hkl_offset = None

        if self.params.index_assignment.method == "local":
            self._assign_indices = assign_indices.AssignIndicesLocal(
                epsilon=self.params.index_assignment.local.epsilon,
                delta=self.params.index_assignment.local.delta,
                l_min=self.params.index_assignment.local.l_min,
                nearest_neighbours=self.params.index_assignment.local.nearest_neighbours,
            )
        else:
            self._assign_indices = assign_indices.AssignIndicesGlobal(
                tolerance=self.params.index_assignment.simple.hkl_tolerance)

        if self.all_params.refinement.reflections.outlier.algorithm in (
                "auto",
                libtbx.Auto,
        ):
            if self.experiments[0].goniometer is None:
                self.all_params.refinement.reflections.outlier.algorithm = "sauter_poon"
            else:
                # different default to dials.refine
                # tukey is faster and more appropriate at the indexing step
                self.all_params.refinement.reflections.outlier.algorithm = "tukey"

        for expt in self.experiments[1:]:
            if expt.detector.is_similar_to(self.experiments[0].detector):
                expt.detector = self.experiments[0].detector
            if expt.goniometer is not None and expt.goniometer.is_similar_to(
                    self.experiments[0].goniometer):
                expt.goniometer = self.experiments[0].goniometer
                # can only share a beam if we share a goniometer?
                if expt.beam.is_similar_to(self.experiments[0].beam):
                    expt.beam = self.experiments[0].beam
                if self.params.combine_scans and expt.scan == self.experiments[0].scan:
                    expt.scan = self.experiments[0].scan

        if "flags" in self.reflections:
            strong_sel = self.reflections.get_flags(
                self.reflections.flags.strong)
            if strong_sel.count(True) > 0:
                self.reflections = self.reflections.select(strong_sel)
        if "flags" not in self.reflections or strong_sel.count(True) == 0:
            # backwards compatibility for testing
            self.reflections.set_flags(
                flex.size_t_range(len(self.reflections)),
                self.reflections.flags.strong)

        self._setup_symmetry()
        self.d_min = None

        self.setup_indexing()
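Note that set_flags accepts either a boolean mask (as in Examples #1 and #2) or size_t row indices (as in the backwards-compatibility branch above). A minimal sketch of the two forms, assuming DIALS:

from dials.array_family import flex

rt = flex.reflection_table.empty_standard(4)
rt.set_flags(flex.bool([True, False, True, False]), rt.flags.strong)  # mask form
rt.set_flags(flex.size_t_range(len(rt)), rt.flags.strong)             # index form: every row
assert rt.get_flags(rt.flags.strong).count(True) == len(rt)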
Example #4
def test_correct_correction(dials_data):
    """Test that the anvil absorption correction is producing expected values."""
    data_dir = dials_data("centroid_test_data")

    # We'll need an integrated reflection table and an experiment list.
    reflections_file = data_dir.join("integrated.pickle")
    experiments_file = data_dir.join("experiments.json")

    # We need only test with the first ten reflections.
    reflections = flex.reflection_table.from_file(reflections_file)
    reflections = reflections.select(flex.size_t_range(10))

    experiment = ExperimentList.from_file(experiments_file)[0]

    # Test the correction that would be applied to a DAC with 1.5mm-thick anvils,
    # aligned along the z-axis at goniometer zero-datum.
    old_reflections = copy.deepcopy(reflections)
    correct_intensities_for_dac_attenuation(experiment, reflections, (0, 0, 1),
                                            1.5)

    cases = {
        "intensity.sum.value": reflections.flags.integrated_sum,
        "intensity.sum.variance": reflections.flags.integrated_sum,
        "intensity.prf.value": reflections.flags.integrated_prf,
        "intensity.prf.variance": reflections.flags.integrated_prf,
    }
    corrections = flex.double([
        0,
        6.653068275094517,
        6.522657529202368,
        6.3865190053761,
        6.587270967838122,
        6.43403642876391,
        6.39216742203502,
        0,
        6.152148372872684,
        6.0474840161407375,
    ])
    for case, flag in cases.items():
        flagged = reflections.get_flags(flag)

        target_correction = corrections.select(flagged)
        if "variance" in case:
            target_correction = flex.pow2(target_correction)

        intensity_correction = (reflections[case] /
                                old_reflections[case]).select(flagged)

        # Check that the un-integrated reflections are unchanged.
        assert pytest.approx(reflections[case].select(
            ~flagged)) == old_reflections[case].select(~flagged), (
                "Un-integrated reflections have been erroneously "
                "'corrected'.")

        # Check that the applied corrections are correct.
        assert pytest.approx(
            intensity_correction, rel=1e-5
        ) == list(target_correction), (
            "The applied intensity correction to %s doesn't seem to be correct."
            % case)
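The variance cases are squared because scaling an intensity by a factor c scales its variance by c**2. A quick numeric check of that identity, assuming DIALS flex:

from dials.array_family import flex

corrections = flex.double([2.0, 3.0])
intensities = flex.double([10.0, 10.0])
variances = flex.double([1.0, 4.0])
# intensities scale linearly, variances quadratically
assert list(intensities * corrections) == [20.0, 30.0]
assert list(variances * flex.pow2(corrections)) == [4.0, 36.0]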
Example #5
    def _id_refs_to_keep(self, obs_data):
        """Create a selection of observations that pass certain conditions.

    This step includes rejection of reflections too close to the spindle,
    reflections measured outside the scan range, rejection of the (0,0,0)
    Miller index and rejection of reflections with the overload flag set.
    Outlier rejection is done later."""

        # first exclude reflections with miller index set to 0,0,0
        sel1 = obs_data['miller_index'] != (0, 0, 0)

        # exclude reflections with overloads, as these have worse centroids
        sel2 = ~obs_data.get_flags(obs_data.flags.overloaded)

        # combine selections
        sel = sel1 & sel2
        inc = flex.size_t_range(len(obs_data)).select(sel)
        obs_data = obs_data.select(sel)

        # Default to True to pass the following test if there is no rotation axis
        # for a particular experiment
        to_keep = flex.bool(len(inc), True)

        for iexp, exp in enumerate(self._experiments):
            axis = self._axes[iexp]
            if not axis or exp.scan is None: continue
            if exp.scan.get_oscillation()[1] == 0.0: continue
            sel = obs_data['id'] == iexp
            s0 = self._s0vecs[iexp]
            s1 = obs_data['s1'].select(sel)
            phi = obs_data['xyzobs.mm.value'].parts()[2].select(sel)

            # first test: reject reflections for which the parallelepiped formed
            # between the gonio axis, s0 and s1 has a volume of less than the cutoff.
            # Those reflections are by definition closer to the spindle-beam
            # plane and for low values of the cutoff are troublesome to
            # integrate anyway.
            p_vol = flex.abs(
                s1.cross(flex.vec3_double(s1.size(), s0)).dot(axis))
            passed1 = p_vol > self._close_to_spindle_cutoff

            # second test: reject reflections that lie outside the scan range
            passed2 = exp.scan.is_angle_valid(phi, deg=False)

            # sanity check to catch a mutilated scan that does not make sense
            if passed2.count(True) == 0:
                from libtbx.utils import Sorry
                raise Sorry(
                    "Experiment id {0} contains no reflections with valid "
                    "scan angles".format(iexp))

            # combine tests
            to_update = passed1 & passed2
            to_keep.set_selected(sel, to_update)

        inc = inc.select(to_keep)

        return inc
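The close-to-spindle test computes |(s1 × s0) · axis|, the volume of the parallelepiped spanned by s1, s0 and the rotation axis: a reflection whose s1 lies in the spindle-beam plane gives zero volume. A quick numeric sketch, assuming scitbx:

from scitbx import matrix

axis = matrix.col((1, 0, 0))         # spindle along x
s0 = matrix.col((0, 0, -1))          # beam along -z
s1_in_plane = matrix.col((0, 0, 1))  # lies in the spindle-beam (x-z) plane
s1_off_plane = matrix.col((0, 1, 0))

assert abs(s1_in_plane.cross(s0).dot(axis)) == 0.0
assert abs(s1_off_plane.cross(s0).dot(axis)) == 1.0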
Example #6
  def __init__(self, reflections,
                     experiments,
                     nref_per_degree=None,
                     max_sample_size=None,
                     min_sample_size=0,
                     close_to_spindle_cutoff=0.1,
                     outlier_detector=None,
                     weighting_strategy_override=None,
                     verbosity=0):

    # set verbosity
    self._verbosity = verbosity

    # keep track of models
    self._experiments = experiments
    goniometers = [e.goniometer for e in self._experiments]
    self._axes = [matrix.col(g.get_rotation_axis()) if g else None for g in goniometers]
    self._s0vecs = [matrix.col(e.beam.get_s0()) for e in self._experiments]

    # keep track of the original indices of the reflections
    reflections['iobs'] = flex.size_t_range(len(reflections))

    # set up the reflection inclusion criteria
    self._close_to_spindle_cutoff = close_to_spindle_cutoff #too close to spindle
    self._outlier_detector = outlier_detector #for outlier rejection
    self._nref_per_degree = nref_per_degree #random subsets
    self._max_sample_size = max_sample_size #sample size ceiling
    self._min_sample_size = min_sample_size #sample size floor

    # exclude reflections that fail some inclusion criteria
    refs_to_keep = self._id_refs_to_keep(reflections)
    self._accepted_refs_size = len(refs_to_keep)

    # set entering flags for all reflections
    reflections['entering'] = calculate_entering_flags(reflections,
      self._experiments)

    # set observed frame numbers for all reflections if not already present
    calculate_frame_numbers(reflections, self._experiments)

    # reset all use flags
    self.reset_accepted_reflections(reflections)

    # put full list of indexed reflections aside and select only the reflections
    # that were not excluded to manage
    self._indexed = reflections
    self._reflections = reflections.select(flex.size_t(refs_to_keep))

    # set weights for all kept reflections
    if weighting_strategy_override is not None:
      self._weighting_strategy = weighting_strategy_override
    self._weighting_strategy.calculate_weights(self._reflections)

    # not known until the manager is finalised
    self._sample_size = None

    return
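The 'iobs' column added above records each reflection's original row number, so rows of any later selection can be traced back to the full table. A minimal sketch, assuming DIALS:

from dials.array_family import flex

rt = flex.reflection_table.empty_standard(5)
rt["iobs"] = flex.size_t_range(len(rt))
subset = rt.select(flex.size_t([1, 3]))
assert list(subset["iobs"]) == [1, 3]  # original indices survive the selection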
Example #7
    def __call__(self, experiments):
        """
        Do the spot finding.

        :param experiments: The experiments to process
        :return: The observed spots
        """
        from dials.array_family import flex
        import six.moves.cPickle as pickle
        from dxtbx.format.image import ImageBool

        # Loop through all the imagesets and find the strong spots
        reflections = flex.reflection_table()
        for i, experiment in enumerate(experiments):

            imageset = experiment.imageset

            # Find the strong spots in the sweep
            logger.info("-" * 80)
            logger.info("Finding strong spots in imageset %d" % i)
            logger.info("-" * 80)
            logger.info("")
            table, hot_mask = self._find_spots_in_imageset(imageset)
            table["id"] = flex.int(table.nrows(), i)
            reflections.extend(table)

            # Write a hot pixel mask
            if self.write_hot_mask:
                if not imageset.external_lookup.mask.data.empty():
                    for m1, m2 in zip(hot_mask,
                                      imageset.external_lookup.mask.data):
                        m1 &= m2.data()
                    imageset.external_lookup.mask.data = ImageBool(hot_mask)
                else:
                    imageset.external_lookup.mask.data = ImageBool(hot_mask)
                imageset.external_lookup.mask.filename = "%s_%d.pickle" % (
                    self.hot_mask_prefix,
                    i,
                )

                # Write the hot mask
                with open(imageset.external_lookup.mask.filename,
                          "wb") as outfile:
                    pickle.dump(hot_mask,
                                outfile,
                                protocol=pickle.HIGHEST_PROTOCOL)

        # Set the strong spot flag
        reflections.set_flags(flex.size_t_range(len(reflections)),
                              reflections.flags.strong)

        # Check for overloads
        reflections.is_overloaded(experiments)

        # Return the reflections
        return reflections
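The hot-mask handling above ANDs each hot-mask panel with any existing external mask in place (m1 &= m2.data()). The panels are 2D flex.bool grids; a 1D sketch of the same elementwise in-place AND, assuming DIALS flex:

from dials.array_family import flex

hot = flex.bool([False, True, True, True])       # one hot pixel already excluded
external = flex.bool([True, True, True, False])  # one pixel masked externally
hot &= external                                  # in-place AND, as in m1 &= m2.data()
assert list(hot) == [False, True, True, False]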
Example #8
    def __call__(self, datablock):
        '''
        Do the spot finding.

        :param datablock: The datablock to process
        :return: The observed spots
        '''
        from dials.array_family import flex
        import cPickle as pickle

        # Loop through all the imagesets and find the strong spots
        reflections = flex.reflection_table()
        for i, imageset in enumerate(datablock.extract_imagesets()):

            # Find the strong spots in the sweep
            logger.info('-' * 80)
            logger.info('Finding strong spots in imageset %d' % i)
            logger.info('-' * 80)
            logger.info('')
            table, hot_mask = self._find_spots_in_imageset(imageset)
            table['id'] = flex.int(table.nrows(), i)
            reflections.extend(table)

            # Write a hot pixel mask
            if self.write_hot_mask:
                if imageset.external_lookup.mask.data is not None:
                    for m1, m2 in zip(hot_mask,
                                      imageset.external_lookup.mask.data):
                        m1 &= m2
                    imageset.external_lookup.mask.data = hot_mask
                else:
                    imageset.external_lookup.mask.data = hot_mask
                imageset.external_lookup.mask.filename = "%s_%d.pickle" % (
                    self.hot_mask_prefix, i)

                # Write the hot mask
                with open(imageset.external_lookup.mask.filename,
                          "wb") as outfile:
                    pickle.dump(hot_mask,
                                outfile,
                                protocol=pickle.HIGHEST_PROTOCOL)

        # Set the strong spot flag
        reflections.set_flags(flex.size_t_range(len(reflections)),
                              reflections.flags.strong)

        # Check for overloads
        reflections.is_overloaded(datablock)

        # Return the reflections
        return reflections
Example #9
  def _id_refs_to_keep(self, obs_data):
    """Create a selection of observations that pass certain conditions.

    This step includes rejection of reflections too close to the spindle,
    reflections measured outside the scan range, rejection of the (0,0,0)
    Miller index and rejection of reflections with the overload flag set.
    Outlier rejection is done later."""

    # first exclude reflections with miller index set to 0,0,0
    sel1 = obs_data['miller_index'] != (0,0,0)

    # exclude reflections with overloads, as these have worse centroids
    sel2 = ~obs_data.get_flags(obs_data.flags.overloaded)

    # combine selections
    sel = sel1 & sel2
    inc = flex.size_t_range(len(obs_data)).select(sel)
    obs_data = obs_data.select(sel)

    # Default to True to pass the following test if there is no rotation axis
    # for a particular experiment
    to_keep = flex.bool(len(inc), True)

    for iexp, exp in enumerate(self._experiments):
      axis = self._axes[iexp]
      if not axis or exp.scan is None: continue
      if exp.scan.get_oscillation()[1] == 0.0: continue
      sel = obs_data['id'] == iexp
      s0 = self._s0vecs[iexp]
      s1 = obs_data['s1'].select(sel)
      phi = obs_data['xyzobs.mm.value'].parts()[2].select(sel)

      # first test: reject reflections for which the parallelepiped formed
      # between the gonio axis, s0 and s1 has a volume of less than the cutoff.
      # Those reflections are by definition closer to the spindle-beam
      # plane and for low values of the cutoff are troublesome to
      # integrate anyway.
      p_vol = flex.abs(s1.cross(flex.vec3_double(s1.size(), s0)).dot(axis))
      passed1 = p_vol > self._close_to_spindle_cutoff

      # second test: reject reflections that lie outside the scan range
      phi_min, phi_max = exp.scan.get_oscillation_range(deg=False)
      passed2 = (phi >= phi_min) & (phi <= phi_max)

      # combine tests
      to_update = passed1 & passed2
      to_keep.set_selected(sel, to_update)

    inc = inc.select(to_keep)

    return inc
Example #10
  def __call__(self, datablock):
    '''
    Do the spot finding.

    :param datablock: The datablock to process
    :return: The observed spots

    '''
    from dials.array_family import flex
    import cPickle as pickle

    # Loop through all the imagesets and find the strong spots
    reflections = flex.reflection_table()
    for i, imageset in enumerate(datablock.extract_imagesets()):

      # Find the strong spots in the sweep
      logger.info('-' * 80)
      logger.info('Finding strong spots in imageset %d' % i)
      logger.info('-' * 80)
      logger.info('')
      table, hot_mask = self._find_spots_in_imageset(imageset)
      table['id'] = flex.int(table.nrows(), i)
      reflections.extend(table)

      # Write a hot pixel mask
      if self.write_hot_mask:
        if imageset.external_lookup.mask.data is not None:
          and_mask = []
          for m1, m2 in zip(imageset.external_lookup.mask.data, hot_mask):
            and_mask.append(m1 & m2)
          imageset.external_lookup.mask.data = tuple(and_mask)
        else:
          imageset.external_lookup.mask.data = hot_mask
        imageset.external_lookup.mask.filename = "hot_mask_%d.pickle" % i

        # Write the hot mask
        with open(imageset.external_lookup.mask.filename, "wb") as outfile:
          pickle.dump(hot_mask, outfile, protocol=pickle.HIGHEST_PROTOCOL)

    # Set the strong spot flag
    reflections.set_flags(
      flex.size_t_range(len(reflections)),
      reflections.flags.strong)

    # Check for overloads
    reflections.is_overloaded(datablock)

    # Return the reflections
    return reflections
Example #11
    def __init__(self, reflections, experiments, params=None):
        '''Init function for iota_indexer; differs from indexer_base in that the
        index assignment setup is skipped. All features only work for stills.'''

        # FIXME this should not be called the stills_indexer __init__ method
        # FIXME need to write own __init__ function
        #stills_indexer.__init__(self, reflections, imagesets, params)
        self.reflections = reflections
        self.experiments = experiments
        #if params is None: params = master_params
        self.params = params.indexing
        self.all_params = params
        self.refined_experiments = None
        self.hkl_offset = None
        #Aug_Refactor
        #if self.params.index_assignment.method == "local":
        #  self._assign_indices = assign_indices.AssignIndicesLocal(
        #    epsilon=self.params.index_assignment.local.epsilon,
        #    delta=self.params.index_assignment.local.delta,
        #    l_min=self.params.index_assignment.local.l_min,
        #    nearest_neighbours=self.params.index_assignment.local.nearest_neighbours)
        #else:
        #  self._assign_indices = assign_indices.AssignIndicesGlobal(
        #    tolerance=self.params.index_assignment.simple.hkl_tolerance)
        ###

        if self.params.refinement_protocol.n_macro_cycles in ('auto',
                                                              libtbx.Auto):
            self.params.refinement_protocol.n_macro_cycles = 1

        for expt in self.experiments[1:]:
            if expt.detector.is_similar_to(self.experiments[0].detector):
                expt.detector = self.experiments[0].detector

        if 'flags' in self.reflections:
            strong_sel = self.reflections.get_flags(
                self.reflections.flags.strong)
            if strong_sel.count(True) > 0:
                self.reflections = self.reflections.select(strong_sel)
        if 'flags' not in self.reflections or strong_sel.count(True) == 0:
            # backwards compatibility for testing
            self.reflections.set_flags(
                flex.size_t_range(len(self.reflections)),
                self.reflections.flags.strong)

        self._setup_symmetry()
        self.d_min = None
        self.setup_indexing()
Example #12
    def _id_refs_to_keep(self, obs_data):
        """Create a selection of observations that pass certain conditions.

        Stills-specific version removes checks relevant only to experiments
        with a rotation axis."""

        # first exclude reflections with miller index set to 0,0,0
        sel1 = obs_data["miller_index"] != (0, 0, 0)

        # exclude reflections with overloads, as these have worse centroids
        sel2 = ~obs_data.get_flags(obs_data.flags.overloaded)

        # combine selections
        sel = sel1 & sel2
        inc = flex.size_t_range(len(obs_data)).select(sel)

        return inc
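All the _id_refs_to_keep variants rely on the same idiom: flex.size_t_range(n).select(mask) converts a boolean selection into the row indices that survive it. A minimal sketch, assuming DIALS flex:

from dials.array_family import flex

mask = flex.bool([True, False, True, True, False])
inc = flex.size_t_range(len(mask)).select(mask)
assert list(inc) == [0, 2, 3]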
Example #13
def test1():

    # work in a temporary directory
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory(suffix="test_dials_filter_reflections")
    os.chdir(tmp_dir)

    # make a dummy reflection table for the test, setting some flags
    from dials.array_family import flex

    rt = flex.reflection_table.empty_standard(6)
    rt["iobs"] = flex.size_t_range(len(rt))
    mask1 = flex.bool([True] * 3 + [False] * 3)
    mask2 = flex.bool([True, False] * 3)
    rt.set_flags(mask1, rt.flags.integrated)
    rt.set_flags(mask2, rt.flags.bad_spot)
    rt_name = "test_refs.pickle"
    rt.as_pickle(rt_name)

    cmd = "dev.dials.filter_reflections " + rt_name + " inclusions.flag=integrated" + " exclusions.flag=bad_spot"
    print cmd

    try:
        result = easy_run.fully_buffered(command=cmd).raise_if_errors()
        # load results
        ref = flex.reflection_table.from_pickle("filtered.pickle")
    finally:
        os.chdir(cwd)
        # clean up tmp dir
        shutil.rmtree(tmp_dir)

    # The test selects only 1 reflection
    assert len(ref) == 1
    assert list(ref["iobs"]) == [1]

    print "OK"

    return
Example #14
def test_filter_reflections(tmpdir):
  tmpdir.chdir()

  # make a dummy reflection table for the test, setting some flags
  rt = flex.reflection_table.empty_standard(6)
  rt['iobs'] = flex.size_t_range(len(rt))
  mask1 = flex.bool([True] * 3 + [False] * 3)
  mask2 = flex.bool([True, False] * 3)
  rt.set_flags(mask1, rt.flags.integrated)
  rt.set_flags(mask2, rt.flags.bad_spot)
  rt_name = "test_refs.pickle"
  rt.as_pickle(rt_name)

  cmd = "dev.dials.filter_reflections " + rt_name + " inclusions.flag=integrated" + \
    " exclusions.flag=bad_spot"

  result = easy_run.fully_buffered(command=cmd).raise_if_errors()
  # load results
  ref = flex.reflection_table.from_pickle("filtered.pickle")

  # The test selects only 1 reflection
  assert len(ref) == 1
  assert list(ref['iobs']) == [1]
Example #15
def test1():

    # work in a temporary directory
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory(suffix="test_dials_filter_reflections")
    os.chdir(tmp_dir)

    # make a dummy reflection table for the test, setting some flags
    from dials.array_family import flex
    rt = flex.reflection_table.empty_standard(6)
    rt['iobs'] = flex.size_t_range(len(rt))
    mask1 = flex.bool([True] * 3 + [False] * 3)
    mask2 = flex.bool([True, False] * 3)
    rt.set_flags(mask1, rt.flags.integrated)
    rt.set_flags(mask2, rt.flags.bad_spot)
    rt_name = "test_refs.pickle"
    rt.as_pickle(rt_name)

    cmd = "dev.dials.filter_reflections " + rt_name + " inclusions.flag=integrated" + \
      " exclusions.flag=bad_spot"
    print cmd

    try:
        result = easy_run.fully_buffered(command=cmd).raise_if_errors()
        # load results
        ref = flex.reflection_table.from_pickle("filtered.pickle")
    finally:
        os.chdir(cwd)

    # The test selects only 1 reflection
    assert len(ref) == 1
    assert list(ref['iobs']) == [1]

    print "OK"

    return
Example #16
def export_sadabs(integrated_data, experiment_list, hklout, run=0,
                  summation=False, include_partials=False, keep_partials=False,
                  debug=False, predict=True):
  '''Export data from integrated_data corresponding to experiment_list to a
  file for input to SADABS. FIXME probably need to make a .p4p file as
  well...'''

  from dials.array_family import flex
  from scitbx import matrix
  import math

  # for the moment assume (and assert) that we will convert data from exactly
  # one lattice...

  assert(len(experiment_list) == 1)
  # select reflections that are assigned to an experiment (i.e. non-negative id)

  integrated_data = integrated_data.select(integrated_data['id'] >= 0)
  assert max(integrated_data['id']) == 0

  if not summation:
    assert('intensity.prf.value' in integrated_data)

  # strip out negative variance reflections: these should not really be there
  # FIXME Doing select on summation results. Should do on profile result if
  # present? Yes

  if 'intensity.prf.variance' in integrated_data:
    selection = integrated_data.get_flags(
      integrated_data.flags.integrated,
      all=True)
  else:
    selection = integrated_data.get_flags(
      integrated_data.flags.integrated_sum)
  integrated_data = integrated_data.select(selection)

  selection = integrated_data['intensity.sum.variance'] <= 0
  if selection.count(True) > 0:
    integrated_data.del_selected(selection)
    logger.info('Removing %d reflections with negative variance' % \
          selection.count(True))

  if 'intensity.prf.variance' in integrated_data:
    selection = integrated_data['intensity.prf.variance'] <= 0
    if selection.count(True) > 0:
      integrated_data.del_selected(selection)
      logger.info('Removing %d profile reflections with negative variance' % \
            selection.count(True))

  if include_partials:
    integrated_data = sum_partial_reflections(integrated_data)
    integrated_data = scale_partial_reflections(integrated_data)

  if 'partiality' in integrated_data:
    selection = integrated_data['partiality'] < 0.99
    if selection.count(True) > 0 and not keep_partials:
      integrated_data.del_selected(selection)
      logger.info('Removing %d incomplete reflections' % \
        selection.count(True))

  experiment = experiment_list[0]
  assert(not experiment.scan is None)

  # sort data before output
  nref = len(integrated_data['miller_index'])
  indices = flex.size_t_range(nref)
  perm = sorted(indices, key=lambda k: integrated_data['miller_index'][k])
  integrated_data = integrated_data.select(flex.size_t(perm))

  assert (not experiment.goniometer is None)

  axis = matrix.col(experiment.goniometer.get_rotation_axis_datum())

  beam = matrix.col(experiment.beam.get_direction())
  s0 = matrix.col(experiment.beam.get_s0())

  F = matrix.sqr(experiment.goniometer.get_fixed_rotation())
  S = matrix.sqr(experiment.goniometer.get_setting_rotation())
  unit_cell = experiment.crystal.get_unit_cell()

  if debug:
    m_format = '%6.3f%6.3f%6.3f\n%6.3f%6.3f%6.3f\n%6.3f%6.3f%6.3f'
    c_format = '%.2f %.2f %.2f %.2f %.2f %.2f'

    logger.info('Unit cell parameters from experiment: %s' % (c_format %
         unit_cell.parameters()))
    logger.info('Symmetry: %s' % experiment.crystal.get_space_group().type(
         ).lookup_symbol())

    logger.info('Goniometer fixed matrix:\n%s' % (m_format % F.elems))
    logger.info('Goniometer setting matrix:\n%s' % (m_format % S.elems))
    logger.info('Goniometer scan axis:\n%6.3f%6.3f%6.3f' % (axis.elems))

  # detector scaling info
  assert(len(experiment.detector) == 1)
  panel = experiment.detector[0]
  dims = panel.get_image_size()
  pixel = panel.get_pixel_size()
  fast_axis = matrix.col(panel.get_fast_axis())
  slow_axis = matrix.col(panel.get_slow_axis())
  normal = fast_axis.cross(slow_axis)
  detector2t = s0.angle(normal, deg=True)
  origin = matrix.col(panel.get_origin())

  if debug:
    logger.info('Detector fast, slow axes:')
    logger.info('%6.3f%6.3f%6.3f' % (fast_axis.elems))
    logger.info('%6.3f%6.3f%6.3f' % (slow_axis.elems))
    logger.info('Detector two theta (degrees): %.2f' % detector2t)

  scl_x = 512.0 / (dims[0] * pixel[0])
  scl_y = 512.0 / (dims[1] * pixel[1])

  image_range = experiment.scan.get_image_range()

  from cctbx.array_family import flex as cflex # implicit import
  from cctbx.miller import map_to_asu_isym # implicit import

  # gather the required information for the reflection file

  nref = len(integrated_data['miller_index'])
  zdet = flex.double(integrated_data['xyzcal.px'].parts()[2])

  miller_index = integrated_data['miller_index']

  I = None
  sigI = None

  # export including scale factors

  if 'lp' in integrated_data:
    lp = integrated_data['lp']
  else:
    lp = flex.double(nref, 1.0)
  if 'dqe' in integrated_data:
    dqe = integrated_data['dqe']
  else:
    dqe = flex.double(nref, 1.0)
  scl = lp / dqe

  if summation:
    I = integrated_data['intensity.sum.value'] * scl
    V = integrated_data['intensity.sum.variance'] * scl * scl
    assert V.all_gt(0)
    sigI = flex.sqrt(V)
  else:
    I = integrated_data['intensity.prf.value'] * scl
    V = integrated_data['intensity.prf.variance'] * scl * scl
    assert V.all_gt(0)
    sigI = flex.sqrt(V)

  # figure out scaling to make sure data fit into format 2F8.2 i.e. Imax < 1e5

  Imax = flex.max(I)

  if debug:
    logger.info('Maximum intensity in file: %8.2f' % Imax)

  if Imax > 99999.0:
    scale = 99999.0 / Imax
    I = I * scale
    sigI = sigI * scale

  phi_start, phi_range = experiment.scan.get_image_oscillation(image_range[0])

  if predict:
    logger.info('Using scan static predicted spot locations')
    from dials.algorithms.spot_prediction import ScanStaticReflectionPredictor
    predictor = ScanStaticReflectionPredictor(experiment)
    UB = experiment.crystal.get_A()
    predictor.for_reflection_table(integrated_data, UB)

  if not experiment.crystal.num_scan_points:
    logger.info('No scan varying model: use static')
    static = True
  else:
    static = False

  fout = open(hklout, 'w')

  for j in range(nref):

    h, k, l = miller_index[j]

    if predict:
      x_mm, y_mm, z_rad = integrated_data['xyzcal.mm'][j]
    else:
      x_mm, y_mm, z_rad = integrated_data['xyzobs.mm.value'][j]

    z0 = integrated_data['xyzcal.px'][j][2]
    istol = int(round(10000 * unit_cell.stol((h, k, l))))

    if predict or static:
      # work from a scan static model & assume perfect goniometer
      # FIXME maybe should work back in the option to predict spot positions
      UB = experiment.crystal.get_A()
      phi = phi_start + z0 * phi_range
      R = axis.axis_and_angle_as_r3_rotation_matrix(phi, deg=True)
      RUB = S * R * F * UB
    else:
      # properly compute RUB for every reflection
      UB = experiment.crystal.get_A_at_scan_point(int(round(z0)))
      phi = phi_start + z0 * phi_range
      R = axis.axis_and_angle_as_r3_rotation_matrix(phi, deg=True)
      RUB = S * R * F * UB

    x = RUB * (h, k, l)
    s = (s0 + x).normalize()

    # can also compute s based on centre of mass of spot
    # s = (origin + x_mm * fast_axis + y_mm * slow_axis).normalize()

    astar = (RUB * (1, 0, 0)).normalize()
    bstar = (RUB * (0, 1, 0)).normalize()
    cstar = (RUB * (0, 0, 1)).normalize()

    ix = beam.dot(astar)
    iy = beam.dot(bstar)
    iz = beam.dot(cstar)

    dx = s.dot(astar)
    dy = s.dot(bstar)
    dz = s.dot(cstar)

    x = x_mm * scl_x
    y = y_mm * scl_y
    z = (z_rad * 180 / math.pi - phi_start) / phi_range

    fout.write('%4d%4d%4d%8.2f%8.2f%4d%8.5f%8.5f%8.5f%8.5f%8.5f%8.5f' % \
               (h, k, l, I[j], sigI[j], run, ix, dx, iy, dy, iz, dz))
    fout.write('%7.2f%7.2f%8.2f%7.2f%5d\n' % (x, y, z, detector2t, istol))

  fout.close()
  logger.info('Output %d reflections to %s' % (nref, hklout))
  return
Example #17
    def __init__(self,
                 reflections,
                 experiments,
                 nref_per_degree=None,
                 max_sample_size=None,
                 min_sample_size=0,
                 close_to_spindle_cutoff=0.02,
                 outlier_detector=None,
                 weighting_strategy_override=None,
                 verbosity=0):

        # set verbosity
        self._verbosity = verbosity

        # keep track of models
        self._experiments = experiments
        goniometers = [e.goniometer for e in self._experiments]
        self._axes = [
            matrix.col(g.get_rotation_axis()) if g else None
            for g in goniometers
        ]
        self._s0vecs = [matrix.col(e.beam.get_s0()) for e in self._experiments]

        # unset the refinement flags (creates flags field if needed)
        reflections.unset_flags(flex.size_t_range(len(reflections)),
                                flex.reflection_table.flags.used_in_refinement)

        # check that the observed beam vectors are stored: if not, compute them
        n_s1_set = set_obs_s1(reflections, experiments)
        if n_s1_set > 0 and verbosity > 0:
            logger.debug("Set scattering vectors for %d reflections", n_s1_set)

        # keep track of the original indices of the reflections
        reflections['iobs'] = flex.size_t_range(len(reflections))

        # set up the reflection inclusion criteria
        self._close_to_spindle_cutoff = close_to_spindle_cutoff  #too close to spindle
        self._outlier_detector = outlier_detector  #for outlier rejection
        self._nref_per_degree = nref_per_degree  #random subsets
        self._max_sample_size = max_sample_size  #sample size ceiling
        self._min_sample_size = min_sample_size  #sample size floor

        # exclude reflections that fail some inclusion criteria
        refs_to_keep = self._id_refs_to_keep(reflections)
        self._accepted_refs_size = len(refs_to_keep)

        # set entering flags for all reflections
        reflections['entering'] = calculate_entering_flags(
            reflections, self._experiments)

        # set observed frame numbers for all reflections if not already present
        calculate_frame_numbers(reflections, self._experiments)

        # reset all use flags
        self.reset_accepted_reflections(reflections)

        # put full list of indexed reflections aside and select only the reflections
        # that were not excluded to manage
        self._indexed = reflections
        self._reflections = reflections.select(flex.size_t(refs_to_keep))

        # set weights for all kept reflections
        if weighting_strategy_override is not None:
            self._weighting_strategy = weighting_strategy_override
        self._weighting_strategy.calculate_weights(self._reflections)

        # not known until the manager is finalised
        self._sample_size = None

        return
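Passing flex.size_t_range(len(reflections)) to unset_flags, as above, clears a flag bit on every row (and, per the comment, creates the flags column if it is absent). A minimal sketch, assuming DIALS:

from dials.array_family import flex

rt = flex.reflection_table.empty_standard(3)
rt.set_flags(flex.bool(3, True), rt.flags.used_in_refinement)
rt.unset_flags(flex.size_t_range(len(rt)), rt.flags.used_in_refinement)
assert rt.get_flags(rt.flags.used_in_refinement).count(True) == 0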
Example #18
  def run(self):
    '''Execute the script.'''
    from time import time
    import cPickle as pickle
    from logging import info
    from dials.util import log
    from dials.algorithms.refinement import RefinerFactory
    from dials.util.options import flatten_reflections, flatten_experiments

    start_time = time()

    # Parse the command line
    params, options = self.parser.parse_args(show_diff_phil=False)
    reflections = flatten_reflections(params.input.reflections)
    experiments = flatten_experiments(params.input.experiments)

    # Try to load the models and data
    nexp = len(experiments)
    if nexp == 0:
      print "No Experiments found in the input"
      self.parser.print_help()
      return
    if len(reflections) == 0:
      print "No reflection data found in the input"
      self.parser.print_help()
      return
    if len(reflections) > 1:
      raise Sorry("Only one reflections list can be imported at present")
    reflections = reflections[0]

    self.check_input(reflections)

    # Configure the logging
    log.config(info=params.output.log,
      debug=params.output.debug_log)
    from dials.util.version import dials_version
    info(dials_version())

    # Log the diff phil
    diff_phil = self.parser.diff_phil.as_str()
    if diff_phil != '':
      info('The following parameters have been modified:\n')
      info(diff_phil)

    # Modify options if necessary
    if params.output.correlation_plot.filename is not None:
      params.refinement.refinery.track_parameter_correlation = True

    # Get the refiner
    info('Configuring refiner')
    refiner = RefinerFactory.from_parameters_data_experiments(params,
        reflections, experiments)

    # Refine the geometry
    if nexp == 1:
      info('Performing refinement of a single Experiment...')
    else:
      info('Performing refinement of {0} Experiments...'.format(nexp))

    # Refine and get the refinement history
    history = refiner.run()

    if params.output.centroids:
      info("Writing table of centroids to '{0}'".format(
        params.output.centroids))
      self.write_centroids_table(refiner, params.output.centroids)

    # Write scan-varying parameters to file, if there were any
    if params.output.parameter_table:
      scan = refiner.get_scan()
      if scan:
        text = refiner.get_param_reporter().varying_params_vs_image_number(
            scan.get_array_range())
        if text:
          info("Writing scan-varying parameter table to {0}".format(
            params.output.parameter_table))
          f = open(params.output.parameter_table,"w")
          f.write(text)
          f.close()
        else:
          info("No scan-varying parameter table to write")

    # get the refined experiments
    experiments = refiner.get_experiments()
    crystals = experiments.crystals()

    if len(crystals) == 1:
      # output the refined model for information
      info('')
      info('Final refined crystal model:')
      info(crystals[0])

    # Save the refined experiments to file
    output_experiments_filename = params.output.experiments
    info('Saving refined experiments to {0}'.format(output_experiments_filename))
    from dxtbx.model.experiment.experiment_list import ExperimentListDumper
    dump = ExperimentListDumper(experiments)
    dump.as_json(output_experiments_filename)

    # Save reflections with updated predictions if requested (allow to switch
    # this off if it is a time-consuming step)
    if params.output.reflections:
      # Update predictions for all indexed reflections
      info('Updating predictions for indexed reflections')
      preds = refiner.predict_for_indexed()

      # just copy over the columns of interest, leaving behind things
      # added by e.g. scan-varying refinement such as 'block', the
      # U, B and UB matrices and gradients.
      reflections['s1'] = preds['s1']
      reflections['xyzcal.mm'] = preds['xyzcal.mm']
      reflections['xyzcal.px'] = preds['xyzcal.px']
      if 'entering' in preds:
        reflections['entering'] = preds['entering']

      # set used_in_refinement and centroid_outlier flags
      assert len(preds) == len(reflections)
      reflections.unset_flags(flex.size_t_range(len(reflections)),
        reflections.flags.used_in_refinement | reflections.flags.centroid_outlier)
      mask = preds.get_flags(preds.flags.centroid_outlier)
      reflections.set_flags(mask, reflections.flags.centroid_outlier)
      mask = preds.get_flags(preds.flags.used_in_refinement)
      reflections.set_flags(mask, reflections.flags.used_in_refinement)

      info('Saving reflections with updated predictions to {0}'.format(
        params.output.reflections))
      if params.output.include_unused_reflections:
        reflections.as_pickle(params.output.reflections)
      else:
        sel = reflections.get_flags(reflections.flags.used_in_refinement)
        reflections.select(sel).as_pickle(params.output.reflections)

    # For debugging, if requested save matches to file
    if params.output.matches:
      matches = refiner.get_matches()
      info('Saving matches (use for debugging purposes) to {0}'.format(
        params.output.matches))
      matches.as_pickle(params.output.matches)

    # Correlation plot
    if params.output.correlation_plot.filename is not None:
      from os.path import splitext
      root, ext = splitext(params.output.correlation_plot.filename)
      if not ext: ext = ".pdf"

      steps = params.output.correlation_plot.steps
      if steps is None: steps = [history.get_nrows()-1]

      # extract individual column names or indices
      col_select = params.output.correlation_plot.col_select

      num_plots = 0
      for step in steps:
        fname_base = root + "_step%02d" % step
        plot_fname = fname_base + ext
        corrmat, labels = refiner.get_parameter_correlation_matrix(step, col_select)
        if [corrmat, labels].count(None) == 0:
          from dials.algorithms.refinement.refinement_helpers import corrgram
          plt = corrgram(corrmat, labels)
          if plt is not None:
            info('Saving parameter correlation plot to {}'.format(plot_fname))
            plt.savefig(plot_fname)
            num_plots += 1
          mat_fname = fname_base + ".pickle"
          with open(mat_fname, 'wb') as handle:
            py_mat = corrmat.as_scitbx_matrix() #convert to pickle-friendly form
            info('Saving parameter correlation matrix to {0}'.format(mat_fname))
            pickle.dump({'corrmat':py_mat, 'labels':labels}, handle)

      if num_plots == 0:
        msg = "Sorry, no parameter correlation plots were produced. Please set " \
              "track_parameter_correlation=True to ensure correlations are " \
              "tracked, and make sure correlation_plot.col_select is valid."
        info(msg)

    # Write out refinement history, if requested
    if params.output.history:
      with open(params.output.history, 'wb') as handle:
        info('Saving refinement step history to {0}'.format(
          params.output.history))
        pickle.dump(history, handle)

    # Log the total time taken
    info("\nTotal time taken: {0:.2f}s".format(time() - start_time))

    return
Example #19
def run():

    parser = OptionParser(
        read_experiments=True,
        read_reflections=True,
        check_format=False,
        epilog=__doc__,
    )
    params, _, args = parser.parse_args(show_diff_phil=False,
                                        return_unhandled=True)
    log.config(verbosity=1, logfile="dials.cluster_filter.log")
    logger.info(dials_version())

    diff_phil = parser.diff_phil.as_str()
    if diff_phil:
        logger.info("The following parameters have been modified:\n%s",
                    diff_phil)

    reflections, expts = reflections_and_experiments_from_files(
        params.input.reflections, params.input.experiments)
    refls = reflections[0]

    refls["intensity"] = refls["intensity.scale.value"]
    refls["variance"] = refls["intensity.scale.variance"]
    refls["initial_index"] = flex.size_t_range(refls.size())
    good_refls = refls.select(refls.get_flags(refls.flags.scaled))

    Ih_table = IhTable(
        [good_refls],
        space_group=expts[0].crystal.get_space_group(),
        indices_lists=[good_refls["initial_index"]],
    )
    block = Ih_table.blocked_data_list[0]

    to_exclude = flex.size_t([])
    for group_idx in range(0, block.n_groups):

        sel = flex.bool(block.n_groups, False)
        sel[group_idx] = True
        sel_block = block.select_on_groups(sel)

        sel = sel_block.intensities / (sel_block.variances**0.5) > -1.0
        sel_block = sel_block.select(sel)
        if sel_block.size:
            I = sel_block.intensities / sel_block.inverse_scale_factors
            V = sel_block.variances / (sel_block.inverse_scale_factors**2)
            result = test_group(I, V, sel_block.asu_miller_index[0])
            if result:
                in_real = result[0]
                to_exclude.extend(
                    flumpy.from_numpy(
                        sel_block.Ih_table["loc_indices"].to_numpy()).select(
                            ~in_real))

    logger.info(to_exclude.size())
    logger.info(refls.size())

    bad = flex.bool(refls.size(), False)
    bad.set_selected(to_exclude, True)
    refls = refls.select(~bad)
    refls.as_file("filtered.refl")
    logger.info("Done")
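The final filtering step above turns a flex.size_t list of rows to exclude into a complementary selection via a boolean mask. A minimal sketch, assuming DIALS flex:

from dials.array_family import flex

n = 5
to_exclude = flex.size_t([1, 3])
bad = flex.bool(n, False)
bad.set_selected(to_exclude, True)  # mark excluded rows
assert list(flex.size_t_range(n).select(~bad)) == [0, 2, 4]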
Example #20
    def _id_refs_to_keep(self, obs_data):
        """Create a selection of observations that pass certain conditions.

        This step includes rejection of reflections too close to the spindle,
        reflections measured outside the scan range, rejection of the (0,0,0)
        Miller index and rejection of reflections with the overload flag set.
        Outlier rejection is done later."""

        # first exclude reflections with miller index set to 0,0,0
        sel1 = obs_data["miller_index"] != (0, 0, 0)

        # exclude reflections with overloads, as these have worse centroids
        sel2 = ~obs_data.get_flags(obs_data.flags.overloaded)

        # combine selections
        sel = sel1 & sel2
        inc = flex.size_t_range(len(obs_data)).select(sel)
        obs_data = obs_data.select(sel)

        # Default to True to pass the following test if there is no rotation axis
        # for a particular experiment
        to_keep = flex.bool(len(inc), True)

        for iexp, exp in enumerate(self._experiments):
            axis = self._axes[iexp]
            if not axis or exp.scan is None:
                continue
            if exp.scan.is_still():
                continue
            sel = obs_data["id"] == iexp
            s0 = self._s0vecs[iexp]
            s1 = obs_data["s1"].select(sel)
            phi = obs_data["xyzobs.mm.value"].parts()[2].select(sel)

            # first test: reject reflections for which the parallelepiped formed
            # between the gonio axis, s0 and s1 has a volume of less than the cutoff.
            # Those reflections are by definition closer to the spindle-beam
            # plane and for low values of the cutoff are troublesome to
            # integrate anyway.
            p_vol = flex.abs(s1.cross(flex.vec3_double(s1.size(), s0)).dot(axis))
            passed1 = p_vol > self._close_to_spindle_cutoff

            # second test: reject reflections that lie outside the scan range
            passed2 = exp.scan.is_angle_valid(phi, deg=False)

            # sanity check to catch a mutilated scan that does not make sense
            if passed2.count(True) == 0:
                raise DialsRefineConfigError(
                    "Experiment id {} contains no reflections with valid "
                    "scan angles".format(iexp)
                )

            # combine tests so far
            to_update = passed1 & passed2

            # third test: reject reflections close to the centres of the first and
            # last images in the scan
            if self._scan_margin > 0.0:
                edge1, edge2 = [e + 0.5 for e in exp.scan.get_image_range()]
                edge1 = exp.scan.get_angle_from_image_index(edge1, deg=False)
                edge1 += self._scan_margin
                edge2 = exp.scan.get_angle_from_image_index(edge2, deg=False)
                edge2 -= self._scan_margin
                passed3 = (edge1 <= phi) & (phi <= edge2)

                # combine the last test only if there would be a reasonable number of
                # reflections left for refinement
                tmp = to_update
                to_update = to_update & passed3
                if to_update.count(True) < 40:
                    logger.warning(
                        "Too few reflections to trim centroids from the scan "
                        "edges. Resetting scan_margin=0.0"
                    )
                    to_update = tmp

            # make selection
            to_keep.set_selected(sel, to_update)

        inc = inc.select(to_keep)

        return inc
Example #21
def combine(datablock_list, reflections_list, params):
  '''
  Combine the found spots.

  '''
  from dxtbx.datablock import BeamComparison
  from dxtbx.datablock import DetectorComparison
  from dxtbx.datablock import GoniometerComparison
  from dxtbx.datablock import DataBlock
  from dxtbx.imageset import ImageSetFactory
  from dials.algorithms.spot_finding import StrongSpotCombiner
  from dials.array_family import flex
  assert len(datablock_list) == len(reflections_list)

  # Get a list of imagesets
  imageset_list = []
  for db in datablock_list:
    iset = db.extract_imagesets()
    assert len(iset) == 1
    imageset_list.append(iset[0])

  compare_beam = BeamComparison(
    wavelength_tolerance=params.input.tolerance.beam.wavelength,
    direction_tolerance=params.input.tolerance.beam.direction,
    polarization_normal_tolerance=params.input.tolerance.beam.polarization_normal,
    polarization_fraction_tolerance=params.input.tolerance.beam.polarization_fraction)
  compare_detector = DetectorComparison(
    fast_axis_tolerance=params.input.tolerance.detector.fast_axis,
    slow_axis_tolerance=params.input.tolerance.detector.slow_axis,
    origin_tolerance=params.input.tolerance.detector.origin)
  compare_goniometer = GoniometerComparison(
    rotation_axis_tolerance=params.input.tolerance.goniometer.rotation_axis,
    fixed_rotation_tolerance=params.input.tolerance.goniometer.fixed_rotation,
    setting_rotation_tolerance=params.input.tolerance.goniometer.setting_rotation)
  scan_tolerance = params.input.tolerance.scan.oscillation

  # The initial models
  format_class = imageset_list[0].get_format_class()
  beam = imageset_list[0].get_beam()
  detector = imageset_list[0].get_detector()
  goniometer = imageset_list[0].get_goniometer()
  scan = imageset_list[0].get_scan()
  template = imageset_list[0].get_template()

  # Check all the models
  for imageset in imageset_list[1:]:
    b = imageset.get_beam()
    d = imageset.get_detector()
    g = imageset.get_goniometer()
    s = imageset.get_scan()
    if not imageset.get_format_class() == format_class:
      raise RuntimeError('Format classes do not match')
    if not imageset.get_template() == template:
      raise RuntimeError('Templates do not match')
    if not compare_beam(beam, b):
      raise RuntimeError('Beam models are too dissimilar')
    if not compare_detector(detector, d):
      raise RuntimeError('Detector models are too dissimilar')
    if not compare_goniometer(goniometer, g):
      raise RuntimeError('Goniometer models are too dissimilar')
    try:
      scan.append(s, scan_tolerance=scan_tolerance)
    except Exception:
      raise RuntimeError('Scans do not match')

  # Get the image range
  image_range = scan.get_image_range()
  image_range = (image_range[0], image_range[1]+1)

  # Create the sweep
  imageset = ImageSetFactory.make_sweep(
    template, range(*image_range),
    format_class,
    beam, detector,
    goniometer, scan)

  # Combine spots
  combiner = StrongSpotCombiner()
  for index, rlist in enumerate(reflections_list, start=1):
    assert rlist['id'].all_eq(0)
    logger.info("Combining %d reflections from reflection list %d" % (
      len(rlist),
      index))
    combiner.add(rlist['shoebox'])
  shoeboxes = combiner.shoeboxes()

  # Calculate the spot centroids and intensities
  logger.info('Combined into %d reflections' % len(shoeboxes))
  centroid = shoeboxes.centroid_valid()
  logger.info('Calculated {0} spot centroids'.format(len(shoeboxes)))
  intensity = shoeboxes.summed_intensity()
  logger.info('Calculated {0} spot intensities'.format(len(shoeboxes)))

  # Construct the reflection table
  reflections = flex.reflection_table(
    flex.observation(
      shoeboxes.panels(),
      centroid,
      intensity),
    shoeboxes)
  reflections['id'] = flex.int(len(reflections), 0)
  reflections.set_flags(
    flex.size_t_range(len(reflections)),
    reflections.flags.strong)

  # Return the datablock and reflections
  return DataBlock([imageset]), reflections
Example #22
def sort_permutation(column, reverse=False):
    indices = flex.size_t_range(len(column))
    perm = sorted(indices, key=lambda k: column[k], reverse=reverse)
    return flex.size_t(perm)
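A usage sketch for sort_permutation (assuming DIALS flex); this is the same sort-by-key idiom that Examples #16 and #23 spell out inline:

from dials.array_family import flex

d = flex.double([2.0, 0.5, 1.0])
perm = sort_permutation(d)  # function defined above
assert list(perm) == [1, 2, 0]
assert list(d.select(perm)) == [0.5, 1.0, 2.0]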
Example #23
def export_xds_ascii(integrated_data, experiment_list, hklout, summation=False,
                     include_partials=False, keep_partials=False, var_model=(1,0)):
  '''Export data from integrated_data corresponding to experiment_list to
  an XDS_ASCII.HKL formatted text file.'''

  from dials.array_family import flex
  import math

  # for the moment assume (and assert) that we will convert data from exactly
  # one lattice...

  assert(len(experiment_list) == 1)
  # select reflections that are assigned to an experiment (i.e. non-negative id)

  integrated_data = integrated_data.select(integrated_data['id'] >= 0)
  assert max(integrated_data['id']) == 0

  if not summation:
    assert('intensity.prf.value' in integrated_data)

  if 'intensity.prf.variance' in integrated_data:
    selection = integrated_data.get_flags(
      integrated_data.flags.integrated,
      all=True)
  else:
    selection = integrated_data.get_flags(
      integrated_data.flags.integrated_sum)
  integrated_data = integrated_data.select(selection)

  selection = integrated_data['intensity.sum.variance'] <= 0
  if selection.count(True) > 0:
    integrated_data.del_selected(selection)
    logger.info('Removing %d reflections with negative variance' % \
          selection.count(True))

  if 'intensity.prf.variance' in integrated_data:
    selection = integrated_data['intensity.prf.variance'] <= 0
    if selection.count(True) > 0:
      integrated_data.del_selected(selection)
      logger.info('Removing %d profile reflections with negative variance' % \
            selection.count(True))

  if include_partials:
    integrated_data = sum_partial_reflections(integrated_data)
    integrated_data = scale_partial_reflections(integrated_data)

  if 'partiality' in integrated_data:
    selection = integrated_data['partiality'] < 0.99
    if selection.count(True) > 0 and not keep_partials:
      integrated_data.del_selected(selection)
      logger.info('Removing %d incomplete reflections' % \
        selection.count(True))

  experiment = experiment_list[0]

  # sort data before output
  nref = len(integrated_data['miller_index'])
  indices = flex.size_t_range(nref)

  import copy
  unique = copy.deepcopy(integrated_data['miller_index'])
  from cctbx.miller import map_to_asu
  map_to_asu(experiment.crystal.get_space_group().type(), False, unique)

  perm = sorted(indices, key=lambda k: unique[k])
  integrated_data = integrated_data.select(flex.size_t(perm))

  from scitbx import matrix
  from rstbx.cftbx.coordinate_frame_helpers import align_reference_frame

  assert experiment.goniometer is not None

  unit_cell = experiment.crystal.get_unit_cell()

  from scitbx.array_family import flex
  from math import floor, sqrt

  assert experiment.scan is not None
  image_range = experiment.scan.get_image_range()
  phi_start, phi_range = experiment.scan.get_image_oscillation(image_range[0])

  # gather the required information for the reflection file

  nref = len(integrated_data['miller_index'])
  zdet = flex.double(integrated_data['xyzcal.px'].parts()[2])

  miller_index = integrated_data['miller_index']

  I = None
  sigI = None

  # export including scale factors

  if 'lp' in integrated_data:
    lp = integrated_data['lp']
  else:
    lp = flex.double(nref, 1.0)
  if 'dqe' in integrated_data:
    dqe = integrated_data['dqe']
  else:
    dqe = flex.double(nref, 1.0)
  scl = lp / dqe

  # profile correlation
  if 'profile.correlation' in integrated_data:
    prof_corr = 100.0 * integrated_data['profile.correlation']
  else:
    prof_corr = flex.double(nref, 100.0)

  # partiality
  if 'partiality' in integrated_data:
    partiality = 100 * integrated_data['partiality']
  else:
    partiality = flex.double(nref, 100.0)

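  # apply the two-parameter error model V' = a * (V + b * I^2), with
  # (a, b) = var_model, before taking sigI = sqrt(V')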
  if summation:
    I = integrated_data['intensity.sum.value'] * scl
    V = integrated_data['intensity.sum.variance'] * scl * scl
    assert V.all_gt(0)
    V = var_model[0] * (V + var_model[1] * I * I)
    sigI = flex.sqrt(V)
  else:
    I = integrated_data['intensity.prf.value'] * scl
    V = integrated_data['intensity.prf.variance'] * scl * scl
    assert V.all_gt(0)
    V = var_model[0] * (V + var_model[1] * I * I)
    sigI = flex.sqrt(V)

  fout = open(hklout, 'w')

  # first write the header - in the "standard" coordinate frame...

  panel = experiment.detector[0]
  fast = panel.get_fast_axis()
  slow = panel.get_slow_axis()
  Rd = align_reference_frame(fast, (1,0,0), slow, (0,1,0))
  print('Coordinate change:')
  print('%5.2f %5.2f %5.2f\n%5.2f %5.2f %5.2f\n%5.2f %5.2f %5.2f\n' % Rd.elems)

  fast = Rd * fast
  slow = Rd * slow

  qx, qy = panel.get_pixel_size()
  nx, ny = panel.get_image_size()
  distance = matrix.col(Rd * panel.get_origin()).dot(
      matrix.col(Rd * panel.get_normal()))
  org = Rd * (matrix.col(panel.get_origin()) - distance * matrix.col(
      panel.get_normal()))
  orgx = - org.dot(fast) / qx
  orgy = - org.dot(slow) / qy

  UB = Rd * matrix.sqr(experiment.crystal.get_A())
  real_space_ABC = UB.inverse().elems

  axis = Rd * experiment.goniometer.get_rotation_axis()
  beam = Rd * experiment.beam.get_s0()
  cell_fmt = '%9.3f %9.3f %9.3f %7.3f %7.3f %7.3f'
  axis_fmt = '%9.3f %9.3f %9.3f'

  fout.write('\n'.join([
    '!FORMAT=XDS_ASCII    MERGE=FALSE    FRIEDEL\'S_LAW=TRUE',
    '!Generated by dials.export',
    '!DATA_RANGE= %d %d' % image_range,
    '!ROTATION_AXIS= %9.6f %9.6f %9.6f' % axis.elems,
    '!OSCILLATION_RANGE= %f' % phi_range,
    '!STARTING_ANGLE= %f' % phi_start,
    '!STARTING_FRAME= %d' % image_range[0],
    '!SPACE_GROUP_NUMBER= %d' % experiment.crystal.get_space_group().type().number(),
    '!UNIT_CELL_CONSTANTS= %s' % (cell_fmt % unit_cell.parameters()),
    '!UNIT_CELL_A-AXIS= %s' % (axis_fmt % real_space_ABC[0:3]),
    '!UNIT_CELL_B-AXIS= %s' % (axis_fmt % real_space_ABC[3:6]),
    '!UNIT_CELL_C-AXIS= %s' % (axis_fmt % real_space_ABC[6:9]),
    '!X-RAY_WAVELENGTH= %f' % experiment.beam.get_wavelength(),
    '!INCIDENT_BEAM_DIRECTION= %f %f %f' % beam.elems,
    '!NX= %d NY= %d QX= %f QY= %f' % (nx, ny, qx, qy),
    '!ORGX= %9.2f ORGY= %9.2f' % (orgx, orgy),
    '!DETECTOR_DISTANCE= %8.3f' % distance,
    '!DIRECTION_OF_DETECTOR_X-AXIS= %9.5f %9.5f %9.5f' % fast.elems,
    '!DIRECTION_OF_DETECTOR_Y-AXIS= %9.5f %9.5f %9.5f' % slow.elems,
    '!VARIANCE_MODEL= %7.3e %7.3e' % var_model,
    '!NUMBER_OF_ITEMS_IN_EACH_DATA_RECORD=12',
    '!ITEM_H=1',
    '!ITEM_K=2',
    '!ITEM_L=3',
    '!ITEM_IOBS=4',
    '!ITEM_SIGMA(IOBS)=5',
    '!ITEM_XD=6',
    '!ITEM_YD=7',
    '!ITEM_ZD=8',
    '!ITEM_RLP=9',
    '!ITEM_PEAK=10',
    '!ITEM_CORR=11',
    '!ITEM_PSI=12',
    '!END_OF_HEADER',
    '']))

  # then write the data records

  s0 = Rd * matrix.col(experiment.beam.get_s0())

  for j in range(nref):
    x, y, z = integrated_data['xyzcal.px'][j]
    phi = phi_start + z * phi_range
    h, k, l = miller_index[j]
    X = (UB * (h, k, l)).rotate(axis, phi, deg=True)
    s = s0 + X
    g = s.cross(s0).normalize()
    f = (s - s0).normalize()

    # find component of beam perpendicular to f, e
    e = - (s + s0).normalize()
    if h == k and k == l:
      u = (h, -h, 0)
    else:
      u = (k - l, l - h, h - k)
    q = (matrix.col(u).transpose() * UB.inverse()).normalize(
        ).transpose().rotate(axis, phi, deg=True)

    psi = q.angle(g, deg=True)
    if q.dot(e) < 0:
      psi *= -1

    fout.write('%d %d %d %f %f %f %f %f %f %.1f %.1f %f\n' %
               (h, k, l, I[j], sigI[j], x, y, z, scl[j], partiality[j], prof_corr[j], psi))

  fout.write('!END_OF_DATA\n')
  fout.close()
  logger.info('Output %d reflections to %s' % (nref, hklout))
  return
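
# A minimal, self-contained sketch (not itself DIALS code) of the error-model
# step used in export_xds_ascii above; `a` and `b` are hypothetical names for
# var_model[0] and var_model[1].
def apply_error_model(I, V, a=1.0, b=0.0):
  '''Return sigma(I) after inflating variances by V' = a * (V + b * I^2).'''
  from dials.array_family import flex
  assert V.all_gt(0)
  return flex.sqrt(a * (V + b * I * I))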
Example #24
0
def sort_permutation(column, reverse=False):
    indices = flex.size_t_range(len(column))
    perm = sorted(indices, key=lambda k: column[k], reverse=reverse)
    return flex.size_t(perm)
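
# Usage sketch for sort_permutation (assumes a reflection table with a 'd'
# column): order reflections from highest to lowest resolution.
#   perm = sort_permutation(reflections['d'], reverse=True)
#   reflections = reflections.select(perm)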
Example #25
0
def _export_experiment(filename,
                       integrated_data,
                       experiment,
                       params,
                       var_model=(1, 0)):
    # type: (str, flex.reflection_table, dxtbx.model.Experiment, libtbx.phil.scope_extract, Tuple) -> None
    """Export a single experiment to an XDS_ASCII.HKL format file.

    Args:
        filename: The file to write to
        integrated_data: The reflection table, pre-selected to one experiment
        experiment: The experiment list entry to export
        params: The PHIL configuration object
        var_model: Two-parameter error model (a, b), applied as a * (V + b * I^2)
    """
    # export for xds_ascii should only be for non-scaled reflections
    assert any(i in integrated_data
               for i in ["intensity.sum.value", "intensity.prf.value"])
    # Handle requesting profile intensities (default via auto) but no column
    if "profile" in params.intensity and "intensity.prf.value" not in integrated_data:
        raise Sorry(
            "Requested profile intensity data but only summed present. Use intensity=sum."
        )

    integrated_data = filter_reflection_table(
        integrated_data,
        intensity_choice=params.intensity,
        partiality_threshold=params.mtz.partiality_threshold,
        combine_partials=params.mtz.combine_partials,
        min_isigi=params.mtz.min_isigi,
        filter_ice_rings=params.mtz.filter_ice_rings,
        d_min=params.mtz.d_min,
    )

    # calculate the scl = lp/dqe correction for outputting but don't apply it as
    # it has already been applied in filter_reflection_table
    (
        integrated_data,
        scl,
    ) = FilteringReductionMethods.calculate_lp_qe_correction_and_filter(
        integrated_data)

    # sort data before output
    nref = len(integrated_data["miller_index"])
    indices = flex.size_t_range(nref)

    unique = copy.deepcopy(integrated_data["miller_index"])

    map_to_asu(experiment.crystal.get_space_group().type(), False, unique)
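    # sorting on the ASU-mapped indices groups symmetry-equivalent
    # reflections together in the output file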

    perm = sorted(indices, key=lambda k: unique[k])
    integrated_data = integrated_data.select(flex.size_t(perm))

    if experiment.goniometer is None:
        print(
            "Warning: No goniometer. Experimentally exporting with (1 0 0) axis"
        )

    unit_cell = experiment.crystal.get_unit_cell()

    if experiment.scan is None:
        print(
            "Warning: No Scan. Experimentally exporting no-oscillation values")
        image_range = (1, 1)
        phi_start, phi_range = 0.0, 0.0
    else:
        image_range = experiment.scan.get_image_range()
        phi_start, phi_range = experiment.scan.get_image_oscillation(
            image_range[0])

    # gather the required information for the reflection file

    nref = len(integrated_data["miller_index"])

    miller_index = integrated_data["miller_index"]

    # profile correlation
    if "profile.correlation" in integrated_data:
        prof_corr = 100.0 * integrated_data["profile.correlation"]
    else:
        prof_corr = flex.double(nref, 100.0)

    # partiality
    if "partiality" in integrated_data:
        partiality = 100 * integrated_data["partiality"]
    else:
        partiality = flex.double(nref, 100.0)

    if "intensity.sum.value" in integrated_data:
        I = integrated_data["intensity.sum.value"]
        V = integrated_data["intensity.sum.variance"]
        assert V.all_gt(0)
        V = var_model[0] * (V + var_model[1] * I * I)
        sigI = flex.sqrt(V)
    else:
        I = integrated_data["intensity.prf.value"]
        V = integrated_data["intensity.prf.variance"]
        assert V.all_gt(0)
        V = var_model[0] * (V + var_model[1] * I * I)
        sigI = flex.sqrt(V)

    fout = open(filename, "w")

    # first write the header - in the "standard" coordinate frame...

    panel = experiment.detector[0]
    fast = panel.get_fast_axis()
    slow = panel.get_slow_axis()
    Rd = align_reference_frame(fast, (1, 0, 0), slow, (0, 1, 0))
    print("Coordinate change:")
    print("%5.2f %5.2f %5.2f\n%5.2f %5.2f %5.2f\n%5.2f %5.2f %5.2f\n" %
          Rd.elems)

    fast = Rd * fast
    slow = Rd * slow

    qx, qy = panel.get_pixel_size()
    nx, ny = panel.get_image_size()
    distance = matrix.col(Rd * panel.get_origin()).dot(
        matrix.col(Rd * panel.get_normal()))
    org = Rd * (matrix.col(panel.get_origin()) -
                distance * matrix.col(panel.get_normal()))
    orgx = -org.dot(fast) / qx
    orgy = -org.dot(slow) / qy

    UB = Rd * matrix.sqr(experiment.crystal.get_A())
    real_space_ABC = UB.inverse().elems

    if experiment.goniometer is not None:
        axis = Rd * experiment.goniometer.get_rotation_axis()
    else:
        axis = Rd * (1, 0, 0)

    beam = Rd * experiment.beam.get_s0()
    cell_fmt = "%9.3f %9.3f %9.3f %7.3f %7.3f %7.3f"
    axis_fmt = "%9.3f %9.3f %9.3f"

    fout.write("\n".join([
        "!FORMAT=XDS_ASCII    MERGE=FALSE    FRIEDEL'S_LAW=TRUE",
        "!Generated by dials.export",
        "!DATA_RANGE= %d %d" % image_range,
        "!ROTATION_AXIS= %9.6f %9.6f %9.6f" % axis.elems,
        "!OSCILLATION_RANGE= %f" % phi_range,
        "!STARTING_ANGLE= %f" % phi_start,
        "!STARTING_FRAME= %d" % image_range[0],
        "!SPACE_GROUP_NUMBER= %d" %
        experiment.crystal.get_space_group().type().number(),
        "!UNIT_CELL_CONSTANTS= %s" % (cell_fmt % unit_cell.parameters()),
        "!UNIT_CELL_A-AXIS= %s" % (axis_fmt % real_space_ABC[0:3]),
        "!UNIT_CELL_B-AXIS= %s" % (axis_fmt % real_space_ABC[3:6]),
        "!UNIT_CELL_C-AXIS= %s" % (axis_fmt % real_space_ABC[6:9]),
        "!X-RAY_WAVELENGTH= %f" % experiment.beam.get_wavelength(),
        "!INCIDENT_BEAM_DIRECTION= %f %f %f" % beam.elems,
        "!NX= %d NY= %d QX= %f QY= %f" % (nx, ny, qx, qy),
        "!ORGX= %9.2f ORGY= %9.2f" % (orgx, orgy),
        "!DETECTOR_DISTANCE= %8.3f" % distance,
        "!DIRECTION_OF_DETECTOR_X-AXIS= %9.5f %9.5f %9.5f" % fast.elems,
        "!DIRECTION_OF_DETECTOR_Y-AXIS= %9.5f %9.5f %9.5f" % slow.elems,
        "!VARIANCE_MODEL= %7.3e %7.3e" % var_model,
        "!NUMBER_OF_ITEMS_IN_EACH_DATA_RECORD=12",
        "!ITEM_H=1",
        "!ITEM_K=2",
        "!ITEM_L=3",
        "!ITEM_IOBS=4",
        "!ITEM_SIGMA(IOBS)=5",
        "!ITEM_XD=6",
        "!ITEM_YD=7",
        "!ITEM_ZD=8",
        "!ITEM_RLP=9",
        "!ITEM_PEAK=10",
        "!ITEM_CORR=11",
        "!ITEM_PSI=12",
        "!END_OF_HEADER",
        "",
    ]))

    # then write the data records

    s0 = Rd * matrix.col(experiment.beam.get_s0())

    for j in range(nref):
        x, y, z = integrated_data["xyzcal.px"][j]
        phi = phi_start + z * phi_range
        h, k, l = miller_index[j]
        X = (UB * (h, k, l)).rotate(axis, phi, deg=True)
        s = s0 + X
        g = s.cross(s0).normalize()

        # find component of beam perpendicular to f, e
        e = -(s + s0).normalize()
        if h == k and k == l:
            u = (h, -h, 0)
        else:
            u = (k - l, l - h, h - k)
        q = ((matrix.col(u).transpose() *
              UB.inverse()).normalize().transpose().rotate(axis, phi,
                                                           deg=True))

        psi = q.angle(g, deg=True)
        if q.dot(e) < 0:
            psi *= -1

        fout.write("%d %d %d %f %f %f %f %f %f %.1f %.1f %f\n" % (
            h,
            k,
            l,
            I[j],
            sigI[j],
            x,
            y,
            z,
            scl[j],
            partiality[j],
            prof_corr[j],
            psi,
        ))

    fout.write("!END_OF_DATA\n")
    fout.close()
    logger.info("Output %d reflections to %s" % (nref, filename))
Example #26
0
def combine(datablock_list, reflections_list, params):
  '''
  Combine the found spots.

  '''
  from dxtbx.datablock import BeamComparison
  from dxtbx.datablock import DetectorComparison
  from dxtbx.datablock import GoniometerComparison
  from dxtbx.datablock import DataBlock
  from dxtbx.imageset import ImageSetFactory
  from dials.algorithms.spot_finding import StrongSpotCombiner
  from logging import info
  from dials.array_family import flex
  assert len(datablock_list) == len(reflections_list)

  # Get a list of imagesets
  imageset_list = []
  for db in datablock_list:
    iset = db.extract_imagesets()
    assert len(iset) == 1
    imageset_list.append(iset[0])

  compare_beam = BeamComparison(
    wavelength_tolerance=params.input.tolerance.beam.wavelength,
    direction_tolerance=params.input.tolerance.beam.direction,
    polarization_normal_tolerance=params.input.tolerance.beam.polarization_normal,
    polarization_fraction_tolerance=params.input.tolerance.beam.polarization_fraction)
  compare_detector = DetectorComparison(
    fast_axis_tolerance=params.input.tolerance.detector.fast_axis,
    slow_axis_tolerance=params.input.tolerance.detector.slow_axis,
    origin_tolerance=params.input.tolerance.detector.origin)
  compare_goniometer = GoniometerComparison(
    rotation_axis_tolerance=params.input.tolerance.goniometer.rotation_axis,
    fixed_rotation_tolerance=params.input.tolerance.goniometer.fixed_rotation,
    setting_rotation_tolerance=params.input.tolerance.goniometer.setting_rotation)
  scan_tolerance = params.input.tolerance.scan.oscillation

  # The initial models
  format_class = imageset_list[0].reader().get_format_class()
  beam = imageset_list[0].get_beam()
  detector = imageset_list[0].get_detector()
  goniometer = imageset_list[0].get_goniometer()
  scan = imageset_list[0].get_scan()
  template = imageset_list[0].get_template()

  # Check all the models
  for imageset in imageset_list[1:]:
    b = imageset.get_beam()
    d = imageset.get_detector()
    g = imageset.get_goniometer()
    s = imageset.get_scan()
    if not imageset.reader().get_format_class() == format_class:
      raise RuntimeError('Format classes do not match')
    if not imageset.get_template() == template:
      raise RuntimeError('Templates do not match')
    if not compare_beam(beam, b):
      raise RuntimeError('Beam models are too dissimilar')
    if not compare_detector(detector, d):
      raise RuntimeError('Detector models are too dissimilar')
    if not compare_goniometer(goniometer, g):
      raise RuntimeError('Goniometer models are too dissimilar')
    try:
      scan.append(s, scan_tolerance=scan_tolerance)
    except Exception:
      raise RuntimeError('Scans do not match')

  # Get the image range
  image_range = scan.get_image_range()
  image_range = (image_range[0], image_range[1]+1)

  # Create the sweep
  imageset = ImageSetFactory.make_sweep(
    template, range(*image_range),
    format_class,
    beam, detector,
    goniometer, scan)

  # Combine spots
  combiner = StrongSpotCombiner()
  for index, rlist in enumerate(reflections_list, start=1):
    assert rlist['id'].all_eq(0)
    info("Combining %d reflections from reflection list %d" % (
      len(rlist),
      index))
    combiner.add(rlist['shoebox'])
  shoeboxes = combiner.shoeboxes()

  # Calculate the spot centroids and intensities
  info('Combined into %d reflections' % len(shoeboxes))
  centroid = shoeboxes.centroid_valid()
  info('Calculated {0} spot centroids'.format(len(shoeboxes)))
  intensity = shoeboxes.summed_intensity()
  info('Calculated {0} spot intensities'.format(len(shoeboxes)))

  # Construct the reflection table
  reflections = flex.reflection_table(
    flex.observation(
      shoeboxes.panels(),
      centroid,
      intensity),
    shoeboxes)
  reflections['id'] = flex.int(len(reflections), 0)
  reflections.set_flags(
    flex.size_t_range(len(reflections)),
    reflections.flags.strong)

  # Return the datablock and reflections
  return DataBlock([imageset]), reflections
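
# A plain-Python sketch (hypothetical numbers) of the tolerance-driven model
# comparison used above: imagesets may be combined only when every model
# attribute agrees within its configured tolerance.
def within_tolerance(a, b, tol):
  return abs(a - b) <= tol

assert within_tolerance(0.9795, 0.9796, tol=1e-3)      # wavelengths agree
assert not within_tolerance(0.9795, 0.9895, tol=1e-3)  # too dissimilar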
Example #27
0
def run_macrocycle(params, reflections, experiments):
    """Run one macrocycle of refinement.

    One macrocycle of refinement is run, as specified by the PHIL
    parameters, using the centroids from the supplied reflections
    and the initial experimental geometry taken from experiments.


    Args:
        params: The working PHIL parameters.
        reflections: A reflection table containing observed centroids
        experiments: The initial dxtbx experimental geometry models

    Returns:
        tuple: The Refiner, the reflection table with updated predictions
            and flags, and the refinement history object.
    """
    # Get the refiner
    logger.info("Configuring refiner")
    try:
        refiner = RefinerFactory.from_parameters_data_experiments(
            params, reflections, experiments
        )
    except DialsRefineConfigError as e:
        sys.exit(str(e))

    # Refine the geometry
    nexp = len(experiments)
    if nexp == 1:
        logger.info("Performing refinement of a single Experiment...")
    else:
        logger.info("Performing refinement of {} Experiments...".format(nexp))

    # Refine and get the refinement history
    try:
        history = refiner.run()
    except DialsRefineRuntimeError as e:
        sys.exit(str(e))

    # Update predictions for all indexed reflections
    logger.info("Updating predictions for indexed reflections")
    preds = refiner.predict_for_indexed()

    # just copy over the columns of interest or columns that may have been
    # updated, leaving behind things added by e.g. scan-varying refinement
    # such as 'block', the U, B and UB matrices and gradients.
    for key in preds:
        if key in reflections.keys() or key in [
            "s1",
            "xyzcal.mm",
            "xyzcal.px",
            "entering",
            "delpsical.rad",
        ]:
            reflections[key] = preds[key]

    # set refinement flags
    assert len(preds) == len(reflections)
    reflections.unset_flags(
        flex.size_t_range(len(reflections)),
        reflections.flags.excluded_for_refinement
        | reflections.flags.used_in_refinement
        | reflections.flags.centroid_outlier
        | reflections.flags.predicted,
    )
    reflections.set_flags(
        preds.get_flags(preds.flags.excluded_for_refinement),
        reflections.flags.excluded_for_refinement,
    )
    reflections.set_flags(
        preds.get_flags(preds.flags.centroid_outlier),
        reflections.flags.centroid_outlier,
    )
    reflections.set_flags(
        preds.get_flags(preds.flags.used_in_refinement),
        reflections.flags.used_in_refinement,
    )
    reflections.set_flags(
        preds.get_flags(preds.flags.predicted), reflections.flags.predicted
    )

    return refiner, reflections, history
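
# The unset_flags/set_flags calls above manipulate bitwise flag masks; a
# minimal sketch with plain integers standing in for the reflection_table
# flags enum (values illustrative only):
STRONG, PREDICTED = 1 << 0, 1 << 1
flags = STRONG | PREDICTED
flags &= ~PREDICTED           # unset_flags: clear PREDICTED, keep STRONG
assert flags == STRONG
flags |= PREDICTED            # set_flags: switch PREDICTED back on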
Example #28
0
    def run(self):
        '''Execute the script.'''
        from time import time
        import cPickle as pickle
        from dials.util import log
        from dials.algorithms.refinement import RefinerFactory
        from dials.util.options import flatten_reflections, flatten_experiments

        start_time = time()

        # Parse the command line
        params, options = self.parser.parse_args(show_diff_phil=False)
        reflections = flatten_reflections(params.input.reflections)
        experiments = flatten_experiments(params.input.experiments)

        # Try to load the models and data
        nexp = len(experiments)
        if nexp == 0:
            print("No Experiments found in the input")
            self.parser.print_help()
            return
        if len(reflections) == 0:
            print("No reflection data found in the input")
            self.parser.print_help()
            return
        if len(reflections) > 1:
            raise Sorry("Only one reflections list can be imported at present")
        reflections = reflections[0]

        self.check_input(reflections)

        # Configure the logging
        log.config(info=params.output.log, debug=params.output.debug_log)
        from dials.util.version import dials_version
        logger.info(dials_version())

        # Log the diff phil
        diff_phil = self.parser.diff_phil.as_str()
        if diff_phil != '':
            logger.info('The following parameters have been modified:\n')
            logger.info(diff_phil)

        # Modify options if necessary
        if params.output.correlation_plot.filename is not None:
            params.refinement.refinery.track_parameter_correlation = True

        # Warn about potentially unhelpful options
        if params.refinement.mp.nproc > 1:
            logger.warning(
                "WARNING: setting nproc > 1 is only helpful in rare "
                "circumstances. It is not recommended for typical data processing "
                "tasks.\n")

        # Get the refiner
        logger.info('Configuring refiner')
        refiner = RefinerFactory.from_parameters_data_experiments(
            params, reflections, experiments)

        # Refine the geometry
        if nexp == 1:
            logger.info('Performing refinement of a single Experiment...')
        else:
            logger.info(
                'Performing refinement of {0} Experiments...'.format(nexp))

        # Refine and get the refinement history
        history = refiner.run()

        if params.output.centroids:
            logger.info("Writing table of centroids to '{0}'".format(
                params.output.centroids))
            self.write_centroids_table(refiner, params.output.centroids)

        # Get the refined experiments
        experiments = refiner.get_experiments()

        # Write scan-varying parameters to file, if there were any
        if params.output.parameter_table:
            scans = experiments.scans()
            if len(scans) > 1:
                logger.info(
                    "Writing a scan-varying parameter table is only supported "
                    "for refinement of a single scan")
            else:
                scan = scans[0]
                text = refiner.get_param_reporter(
                ).varying_params_vs_image_number(scan.get_array_range())
                if text:
                    logger.info(
                        "Writing scan-varying parameter table to {0}".format(
                            params.output.parameter_table))
                    f = open(params.output.parameter_table, "w")
                    f.write(text)
                    f.close()
                else:
                    logger.info("No scan-varying parameter table to write")

        crystals = experiments.crystals()
        if len(crystals) == 1:
            # output the refined model for information
            logger.info('')
            logger.info('Final refined crystal model:')
            logger.info(crystals[0])

        # Save the refined experiments to file
        output_experiments_filename = params.output.experiments
        logger.info('Saving refined experiments to {0}'.format(
            output_experiments_filename))
        from dxtbx.model.experiment_list import ExperimentListDumper
        dump = ExperimentListDumper(experiments)
        dump.as_json(output_experiments_filename)

        # Save reflections with updated predictions if requested (allow to switch
        # this off if it is a time-consuming step)
        if params.output.reflections:
            # Update predictions for all indexed reflections
            logger.info('Updating predictions for indexed reflections')
            preds = refiner.predict_for_indexed()

            # just copy over the columns of interest, leaving behind things
            # added by e.g. scan-varying refinement such as 'block', the
            # U, B and UB matrices and gradients.
            reflections['s1'] = preds['s1']
            reflections['xyzcal.mm'] = preds['xyzcal.mm']
            reflections['xyzcal.px'] = preds['xyzcal.px']
            if 'entering' in preds:
                reflections['entering'] = preds['entering']

            # set used_in_refinement and centroid_outlier flags
            assert len(preds) == len(reflections)
            reflections.unset_flags(
                flex.size_t_range(len(reflections)),
                reflections.flags.used_in_refinement
                | reflections.flags.centroid_outlier)
            mask = preds.get_flags(preds.flags.centroid_outlier)
            reflections.set_flags(mask, reflections.flags.centroid_outlier)
            mask = preds.get_flags(preds.flags.used_in_refinement)
            reflections.set_flags(mask, reflections.flags.used_in_refinement)

            logger.info(
                'Saving reflections with updated predictions to {0}'.format(
                    params.output.reflections))
            if params.output.include_unused_reflections:
                reflections.as_pickle(params.output.reflections)
            else:
                sel = reflections.get_flags(
                    reflections.flags.used_in_refinement)
                reflections.select(sel).as_pickle(params.output.reflections)

        # For debugging, if requested save matches to file
        if params.output.matches:
            matches = refiner.get_matches()
            logger.info(
                'Saving matches (use for debugging purposes) to {0}'.format(
                    params.output.matches))
            matches.as_pickle(params.output.matches)

        # Correlation plot
        if params.output.correlation_plot.filename is not None:
            from os.path import splitext
            root, ext = splitext(params.output.correlation_plot.filename)
            if not ext: ext = ".pdf"

            steps = params.output.correlation_plot.steps
            if steps is None: steps = [history.get_nrows() - 1]

            # extract individual column names or indices
            col_select = params.output.correlation_plot.col_select

            num_plots = 0
            for step in steps:
                fname_base = root
                if len(steps) > 1: fname_base += "_step%02d" % step

                corrmats, labels = refiner.get_parameter_correlation_matrix(
                    step, col_select)
                if [corrmats, labels].count(None) == 0:
                    from dials.algorithms.refinement.refinement_helpers import corrgram
                    for resid_name, corrmat in corrmats.items():
                        plot_fname = fname_base + "_" + resid_name + ext
                        plt = corrgram(corrmat, labels)
                        if plt is not None:
                            logger.info(
                                'Saving parameter correlation plot to {}'.
                                format(plot_fname))
                            plt.savefig(plot_fname)
                            plt.close()
                            num_plots += 1
                    mat_fname = fname_base + ".pickle"
                    with open(mat_fname, 'wb') as handle:
                        for k, corrmat in corrmats.items():
                            corrmats[k] = corrmat.as_scitbx_matrix()
                        logger.info(
                            'Saving parameter correlation matrices to {0}'.
                            format(mat_fname))
                        pickle.dump({
                            'corrmats': corrmats,
                            'labels': labels
                        }, handle)

            if num_plots == 0:
                msg = "Sorry, no parameter correlation plots were produced. Please set " \
                      "track_parameter_correlation=True to ensure correlations are " \
                      "tracked, and make sure correlation_plot.col_select is valid."
                logger.info(msg)

        # Write out refinement history, if requested
        if params.output.history:
            with open(params.output.history, 'wb') as handle:
                logger.info('Saving refinement step history to {0}'.format(
                    params.output.history))
                pickle.dump(history, handle)

        # Log the total time taken
        logger.info("\nTotal time taken: {0:.2f}s".format(time() - start_time))

        return
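
# Sketch of reading back the correlation-matrix pickle written above (the
# file name is hypothetical; the dict layout matches the dump call):
#   import pickle
#   with open('refined_corrmats.pickle', 'rb') as handle:
#       saved = pickle.load(handle)
#   corrmats, labels = saved['corrmats'], saved['labels']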
Example #29
0
def export_sadabs(integrated_data,
                  experiment_list,
                  hklout,
                  run=0,
                  summation=False,
                  include_partials=False,
                  keep_partials=False,
                  debug=False,
                  predict=True):
    '''Export data from integrated_data corresponding to experiment_list to a
  file for input to SADABS. FIXME probably need to make a .p4p file as
  well...'''

    from dials.array_family import flex
    from scitbx import matrix
    import math

    # for the moment assume (and assert) that we will convert data from exactly
    # one lattice...

    assert (len(experiment_list) == 1)
    # select reflections that are assigned to an experiment (i.e. non-negative id)

    integrated_data = integrated_data.select(integrated_data['id'] >= 0)
    assert max(integrated_data['id']) == 0

    if not summation:
        assert ('intensity.prf.value' in integrated_data)

    # strip out negative variance reflections: these should not really be there
    # FIXME Doing select on summation results. Should do on profile result if
    # present? Yes

    if 'intensity.prf.variance' in integrated_data:
        selection = integrated_data.get_flags(integrated_data.flags.integrated,
                                              all=True)
    else:
        selection = integrated_data.get_flags(
            integrated_data.flags.integrated_sum)
    integrated_data = integrated_data.select(selection)

    selection = integrated_data['intensity.sum.variance'] <= 0
    if selection.count(True) > 0:
        integrated_data.del_selected(selection)
        logger.info('Removing %d reflections with non-positive variance' % \
              selection.count(True))

    if 'intensity.prf.variance' in integrated_data:
        selection = integrated_data['intensity.prf.variance'] <= 0
        if selection.count(True) > 0:
            integrated_data.del_selected(selection)
            logger.info('Removing %d profile reflections with non-positive variance' % \
                  selection.count(True))

    if include_partials:
        integrated_data = sum_partial_reflections(integrated_data)
        integrated_data = scale_partial_reflections(integrated_data)

    if 'partiality' in integrated_data:
        selection = integrated_data['partiality'] < 0.99
        if selection.count(True) > 0 and not keep_partials:
            integrated_data.del_selected(selection)
            logger.info('Removing %d incomplete reflections' % \
              selection.count(True))

    experiment = experiment_list[0]
    assert experiment.scan is not None

    # sort data before output
    nref = len(integrated_data['miller_index'])
    indices = flex.size_t_range(nref)
    perm = sorted(indices, key=lambda k: integrated_data['miller_index'][k])
    integrated_data = integrated_data.select(flex.size_t(perm))

    assert experiment.goniometer is not None

    axis = matrix.col(experiment.goniometer.get_rotation_axis_datum())

    beam = matrix.col(experiment.beam.get_direction())
    s0 = matrix.col(experiment.beam.get_s0())

    F = matrix.sqr(experiment.goniometer.get_fixed_rotation())
    S = matrix.sqr(experiment.goniometer.get_setting_rotation())
    unit_cell = experiment.crystal.get_unit_cell()

    if debug:
        m_format = '%6.3f%6.3f%6.3f\n%6.3f%6.3f%6.3f\n%6.3f%6.3f%6.3f'
        c_format = '%.2f %.2f %.2f %.2f %.2f %.2f'

        logger.info('Unit cell parameters from experiment: %s' %
                    (c_format % unit_cell.parameters()))
        logger.info(
            'Symmetry: %s' %
            experiment.crystal.get_space_group().type().lookup_symbol())

        logger.info('Goniometer fixed matrix:\n%s' % (m_format % F.elems))
        logger.info('Goniometer setting matrix:\n%s' % (m_format % S.elems))
        logger.info('Goniometer scan axis:\n%6.3f%6.3f%6.3f' % (axis.elems))

    # detector scaling info
    assert (len(experiment.detector) == 1)
    panel = experiment.detector[0]
    dims = panel.get_image_size()
    pixel = panel.get_pixel_size()
    fast_axis = matrix.col(panel.get_fast_axis())
    slow_axis = matrix.col(panel.get_slow_axis())
    normal = fast_axis.cross(slow_axis)
    detector2t = s0.angle(normal, deg=True)
    origin = matrix.col(panel.get_origin())

    if debug:
        logger.info('Detector fast, slow axes:')
        logger.info('%6.3f%6.3f%6.3f' % (fast_axis.elems))
        logger.info('%6.3f%6.3f%6.3f' % (slow_axis.elems))
        logger.info('Detector two theta (degrees): %.2f' % detector2t)

    scl_x = 512.0 / (dims[0] * pixel[0])
    scl_y = 512.0 / (dims[1] * pixel[1])

    image_range = experiment.scan.get_image_range()

    from cctbx.array_family import flex as cflex  # implicit import
    from cctbx.miller import map_to_asu_isym  # implicit import

    # gather the required information for the reflection file

    nref = len(integrated_data['miller_index'])
    zdet = flex.double(integrated_data['xyzcal.px'].parts()[2])

    miller_index = integrated_data['miller_index']

    I = None
    sigI = None

    # export including scale factors

    if 'lp' in integrated_data:
        lp = integrated_data['lp']
    else:
        lp = flex.double(nref, 1.0)
    if 'qe' in integrated_data:
        qe = integrated_data['qe']
    elif 'dqe' in integrated_data:
        qe = integrated_data['dqe']
    else:
        qe = flex.double(nref, 1.0)
    scl = lp / qe

    if summation:
        I = integrated_data['intensity.sum.value'] * scl
        V = integrated_data['intensity.sum.variance'] * scl * scl
        assert V.all_gt(0)
        sigI = flex.sqrt(V)
    else:
        I = integrated_data['intensity.prf.value'] * scl
        V = integrated_data['intensity.prf.variance'] * scl * scl
        assert V.all_gt(0)
        sigI = flex.sqrt(V)

    # figure out scaling to make sure data fit into format 2F8.2 i.e. Imax < 1e5

    Imax = flex.max(I)

    if debug:
        logger.info('Maximum intensity in file: %8.2f' % Imax)

    if Imax > 99999.0:
        scale = 99999.0 / Imax
        I = I * scale
        sigI = sigI * scale

    phi_start, phi_range = experiment.scan.get_image_oscillation(
        image_range[0])

    if predict:
        logger.info('Using scan static predicted spot locations')
        from dials.algorithms.spot_prediction import ScanStaticReflectionPredictor
        predictor = ScanStaticReflectionPredictor(experiment)
        UB = experiment.crystal.get_A()
        predictor.for_reflection_table(integrated_data, UB)
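        # the re-predicted centroids then provide the 'xyzcal.mm' positions
        # written to the reflection records below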

    if not experiment.crystal.num_scan_points:
        logger.info('No scan varying model: use static')
        static = True
    else:
        static = False

    fout = open(hklout, 'w')

    for j in range(nref):

        h, k, l = miller_index[j]

        if predict:
            x_mm, y_mm, z_rad = integrated_data['xyzcal.mm'][j]
        else:
            x_mm, y_mm, z_rad = integrated_data['xyzobs.mm.value'][j]

        z0 = integrated_data['xyzcal.px'][j][2]
        istol = int(round(10000 * unit_cell.stol((h, k, l))))

        if predict or static:
            # work from a scan static model & assume perfect goniometer
            # FIXME maybe should work back in the option to predict spot positions
            UB = matrix.sqr(experiment.crystal.get_A())
            phi = phi_start + z0 * phi_range
            R = axis.axis_and_angle_as_r3_rotation_matrix(phi, deg=True)
            RUB = S * R * F * UB
        else:
            # properly compute RUB for every reflection
            UB = matrix.sqr(
                experiment.crystal.get_A_at_scan_point(int(round(z0))))
            phi = phi_start + z0 * phi_range
            R = axis.axis_and_angle_as_r3_rotation_matrix(phi, deg=True)
            RUB = S * R * F * UB

        x = RUB * (h, k, l)
        s = (s0 + x).normalize()

        # can also compute s based on centre of mass of spot
        # s = (origin + x_mm * fast_axis + y_mm * slow_axis).normalize()

        astar = (RUB * (1, 0, 0)).normalize()
        bstar = (RUB * (0, 1, 0)).normalize()
        cstar = (RUB * (0, 0, 1)).normalize()

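        # direction cosines of the incident beam and the diffracted beam s
        # against the rotated reciprocal axes; these fill the six cosine
        # columns of each reflection record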
        ix = beam.dot(astar)
        iy = beam.dot(bstar)
        iz = beam.dot(cstar)

        dx = s.dot(astar)
        dy = s.dot(bstar)
        dz = s.dot(cstar)

        x = x_mm * scl_x
        y = y_mm * scl_y
        z = (z_rad * 180 / math.pi - phi_start) / phi_range

        fout.write('%4d%4d%4d%8.2f%8.2f%4d%8.5f%8.5f%8.5f%8.5f%8.5f%8.5f' % \
                   (h, k, l, I[j], sigI[j], run, ix, dx, iy, dy, iz, dz))
        fout.write('%7.2f%7.2f%8.2f%7.2f%5d\n' % (x, y, z, detector2t, istol))

    fout.close()
    logger.info('Output %d reflections to %s' % (nref, hklout))
Example #30
0
def export_sadabs(integrated_data, experiment_list, params):
    """Export data from integrated_data corresponding to experiment_list to a
    file for input to SADABS. FIXME probably need to make a .p4p file as
    well..."""

    from dials.array_family import flex

    # for the moment assume (and assert) that we will convert data from exactly
    # one lattice...

    assert len(experiment_list) == 1
    # select reflections that are assigned to an experiment (i.e. non-negative id)

    integrated_data = integrated_data.select(integrated_data["id"] >= 0)
    assert max(integrated_data["id"]) == 0

    # export for sadabs should only be for non-scaled reflections
    assert any(i in integrated_data
               for i in ["intensity.sum.value", "intensity.prf.value"])

    integrated_data = filter_reflection_table(
        integrated_data,
        intensity_choice=params.intensity,
        partiality_threshold=params.mtz.partiality_threshold,
        combine_partials=params.mtz.combine_partials,
        min_isigi=params.mtz.min_isigi,
        filter_ice_rings=params.mtz.filter_ice_rings,
        d_min=params.mtz.d_min,
    )

    experiment = experiment_list[0]
    assert experiment.scan is not None

    # sort data before output
    nref = len(integrated_data["miller_index"])
    indices = flex.size_t_range(nref)
    perm = sorted(indices, key=lambda k: integrated_data["miller_index"][k])
    integrated_data = integrated_data.select(flex.size_t(perm))

    assert experiment.goniometer is not None

    # Warn of unhelpful SADABS behaviour for certain multi-sequence data sets
    hkl_file_root, _ = os.path.splitext(params.sadabs.hklout)
    if not params.sadabs.run or re.search("_0+$", hkl_file_root):
        logger.warning(
            "It seems SADABS rejects multi-sequence data when the first "
            "filename ends "
            "'_0', '_00', etc., with a cryptic error message:\n"
            "\t'Inconsistent 2theta values in same scan'.\n"
            "You may need to begin the numbering of your SADABS HKL files from 1, "
            "rather than 0, and ensure the SADABS run/batch number is greater than 0."
        )

    axis = matrix.col(experiment.goniometer.get_rotation_axis_datum())

    beam = matrix.col(experiment.beam.get_sample_to_source_direction())
    s0 = matrix.col(experiment.beam.get_s0())

    F = matrix.sqr(experiment.goniometer.get_fixed_rotation())
    S = matrix.sqr(experiment.goniometer.get_setting_rotation())
    unit_cell = experiment.crystal.get_unit_cell()

    if params.debug:
        m_format = "%6.3f%6.3f%6.3f\n%6.3f%6.3f%6.3f\n%6.3f%6.3f%6.3f"
        c_format = "%.2f %.2f %.2f %.2f %.2f %.2f"

        logger.info(
            "Unit cell parameters from experiment: %s",
            c_format % unit_cell.parameters(),
        )
        logger.info(
            "Symmetry: %s",
            experiment.crystal.get_space_group().type().lookup_symbol())

        logger.info("Goniometer fixed matrix:\n%s", m_format % F.elems)
        logger.info("Goniometer setting matrix:\n%s", m_format % S.elems)
        logger.info("Goniometer scan axis:\n%6.3f%6.3f%6.3f", axis.elems)

    # detector scaling info
    assert len(experiment.detector) == 1
    panel = experiment.detector[0]
    dims = panel.get_image_size()
    pixel = panel.get_pixel_size()
    fast_axis = matrix.col(panel.get_fast_axis())
    slow_axis = matrix.col(panel.get_slow_axis())
    normal = fast_axis.cross(slow_axis)
    detector2t = s0.angle(normal, deg=True)

    if params.debug:
        logger.info("Detector fast, slow axes:")
        logger.info("%6.3f%6.3f%6.3f", fast_axis.elems)
        logger.info("%6.3f%6.3f%6.3f", slow_axis.elems)
        logger.info("Detector two theta (degrees): %.2f", detector2t)

    scl_x = 512.0 / (dims[0] * pixel[0])
    scl_y = 512.0 / (dims[1] * pixel[1])

    image_range = experiment.scan.get_image_range()

    from cctbx.array_family import flex as cflex  # implicit import # noqa: F401
    from cctbx.miller import map_to_asu_isym  # implicit import # noqa: F401

    # gather the required information for the reflection file

    nref = len(integrated_data["miller_index"])

    miller_index = integrated_data["miller_index"]

    if "intensity.sum.value" in integrated_data:
        I = integrated_data["intensity.sum.value"]
        V = integrated_data["intensity.sum.variance"]
        assert V.all_gt(0)
        sigI = flex.sqrt(V)
    else:
        I = integrated_data["intensity.prf.value"]
        V = integrated_data["intensity.prf.variance"]
        assert V.all_gt(0)
        sigI = flex.sqrt(V)

    # figure out scaling to make sure data fit into format 2F8.2 i.e. Imax < 1e5

    Imax = flex.max(I)

    if params.debug:
        logger.info("Maximum intensity in file: %8.2f", Imax)

    if Imax > 99999.0:
        scale = 99999.0 / Imax
        I = I * scale
        sigI = sigI * scale

    phi_start, phi_range = experiment.scan.get_image_oscillation(
        image_range[0])

    if params.sadabs.predict:
        logger.info("Using scan static predicted spot locations")
        from dials.algorithms.spot_prediction import ScanStaticReflectionPredictor

        predictor = ScanStaticReflectionPredictor(experiment)
        UB = experiment.crystal.get_A()
        predictor.for_reflection_table(integrated_data, UB)

    if not experiment.crystal.num_scan_points:
        logger.info("No scan varying model: use static")
        static = True
    else:
        static = False

    with open(params.sadabs.hklout, "w") as fout:

        for j in range(nref):

            h, k, l = miller_index[j]

            if params.sadabs.predict:
                x_mm, y_mm, z_rad = integrated_data["xyzcal.mm"][j]
            else:
                x_mm, y_mm, z_rad = integrated_data["xyzobs.mm.value"][j]

            z0 = integrated_data["xyzcal.px"][j][2]
            istol = int(round(10000 * unit_cell.stol((h, k, l))))

            if params.sadabs.predict or static:
                # work from a scan static model & assume perfect goniometer
                # FIXME maybe should work back in the option to predict spot positions
                UB = matrix.sqr(experiment.crystal.get_A())
                phi = phi_start + z0 * phi_range
                R = axis.axis_and_angle_as_r3_rotation_matrix(phi, deg=True)
                RUB = S * R * F * UB
            else:
                # properly compute RUB for every reflection
                UB = matrix.sqr(
                    experiment.crystal.get_A_at_scan_point(int(round(z0))))
                phi = phi_start + z0 * phi_range
                R = axis.axis_and_angle_as_r3_rotation_matrix(phi, deg=True)
                RUB = S * R * F * UB

            x = RUB * (h, k, l)
            s = (s0 + x).normalize()

            # can also compute s based on centre of mass of spot
            # s = (origin + x_mm * fast_axis + y_mm * slow_axis).normalize()

            astar = (RUB * (1, 0, 0)).normalize()
            bstar = (RUB * (0, 1, 0)).normalize()
            cstar = (RUB * (0, 0, 1)).normalize()

            ix = beam.dot(astar)
            iy = beam.dot(bstar)
            iz = beam.dot(cstar)

            dx = s.dot(astar)
            dy = s.dot(bstar)
            dz = s.dot(cstar)

            x = x_mm * scl_x
            y = y_mm * scl_y
            z = (z_rad * 180 / math.pi - phi_start) / phi_range

            fout.write("%4d%4d%4d%8.2f%8.2f%4d%8.5f%8.5f%8.5f%8.5f%8.5f%8.5f" %
                       (h, k, l, I[j], sigI[j], params.sadabs.run, ix, dx, iy,
                        dy, iz, dz))
            fout.write("%7.2f%7.2f%8.2f%7.2f%5d\n" %
                       (x, y, z, detector2t, istol))

    logger.info("Output %d reflections to %s", nref, params.sadabs.hklout)
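
# A quick plain-Python check that the intensity rescaling above keeps values
# inside the fixed-width %8.2f fields of the output records:
Imax = 123456.0
scale = 99999.0 / Imax if Imax > 99999.0 else 1.0
assert len('%8.2f' % (Imax * scale)) == 8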
Example #31
0
def export_xds_ascii(integrated_data,
                     experiment_list,
                     hklout,
                     summation=False,
                     include_partials=False,
                     keep_partials=False,
                     var_model=(1, 0)):
    '''Export data from integrated_data corresponding to experiment_list to
  an XDS_ASCII.HKL formatted text file.'''

    from dials.array_family import flex

    # for the moment assume (and assert) that we will convert data from exactly
    # one lattice...

    assert (len(experiment_list) == 1)
    # select reflections that are assigned to an experiment (i.e. non-negative id)

    integrated_data = integrated_data.select(integrated_data['id'] >= 0)
    assert max(integrated_data['id']) == 0

    if not summation:
        assert ('intensity.prf.value' in integrated_data)

    if 'intensity.prf.variance' in integrated_data:
        selection = integrated_data.get_flags(integrated_data.flags.integrated,
                                              all=True)
    else:
        selection = integrated_data.get_flags(
            integrated_data.flags.integrated_sum)
    integrated_data = integrated_data.select(selection)

    selection = integrated_data['intensity.sum.variance'] <= 0
    if selection.count(True) > 0:
        integrated_data.del_selected(selection)
        logger.info('Removing %d reflections with non-positive variance' % \
              selection.count(True))

    if 'intensity.prf.variance' in integrated_data:
        selection = integrated_data['intensity.prf.variance'] <= 0
        if selection.count(True) > 0:
            integrated_data.del_selected(selection)
            logger.info('Removing %d profile reflections with non-positive variance' % \
                  selection.count(True))

    if include_partials:
        integrated_data = sum_partial_reflections(integrated_data)
        integrated_data = scale_partial_reflections(integrated_data)

    if 'partiality' in integrated_data:
        selection = integrated_data['partiality'] < 0.99
        if selection.count(True) > 0 and not keep_partials:
            integrated_data.del_selected(selection)
            logger.info('Removing %d incomplete reflections' % \
              selection.count(True))

    experiment = experiment_list[0]

    # sort data before output
    nref = len(integrated_data['miller_index'])
    indices = flex.size_t_range(nref)

    import copy
    unique = copy.deepcopy(integrated_data['miller_index'])
    from cctbx.miller import map_to_asu
    map_to_asu(experiment.crystal.get_space_group().type(), False, unique)

    perm = sorted(indices, key=lambda k: unique[k])
    integrated_data = integrated_data.select(flex.size_t(perm))

    from scitbx import matrix
    from rstbx.cftbx.coordinate_frame_helpers import align_reference_frame

    assert experiment.goniometer is not None

    unit_cell = experiment.crystal.get_unit_cell()

    from scitbx.array_family import flex
    from math import sqrt

    assert experiment.scan is not None
    image_range = experiment.scan.get_image_range()
    phi_start, phi_range = experiment.scan.get_image_oscillation(
        image_range[0])

    # gather the required information for the reflection file

    nref = len(integrated_data['miller_index'])
    zdet = flex.double(integrated_data['xyzcal.px'].parts()[2])

    miller_index = integrated_data['miller_index']

    I = None
    sigI = None

    # export including scale factors

    if 'lp' in integrated_data:
        lp = integrated_data['lp']
    else:
        lp = flex.double(nref, 1.0)
    if 'dqe' in integrated_data:
        dqe = integrated_data['dqe']
    else:
        dqe = flex.double(nref, 1.0)
    scl = lp / dqe

    # profile correlation
    if 'profile.correlation' in integrated_data:
        prof_corr = 100.0 * integrated_data['profile.correlation']
    else:
        prof_corr = flex.double(nref, 100.0)

    # partiality
    if 'partiality' in integrated_data:
        partiality = 100 * integrated_data['partiality']
    else:
        partiality = flex.double(nref, 100.0)

    if summation:
        I = integrated_data['intensity.sum.value'] * scl
        V = integrated_data['intensity.sum.variance'] * scl * scl
        assert V.all_gt(0)
        V = var_model[0] * (V + var_model[1] * I * I)
        sigI = flex.sqrt(V)
    else:
        I = integrated_data['intensity.prf.value'] * scl
        V = integrated_data['intensity.prf.variance'] * scl * scl
        assert V.all_gt(0)
        V = var_model[0] * (V + var_model[1] * I * I)
        sigI = flex.sqrt(V)

    fout = open(hklout, 'w')

    # first write the header - in the "standard" coordinate frame...

    panel = experiment.detector[0]
    fast = panel.get_fast_axis()
    slow = panel.get_slow_axis()
    Rd = align_reference_frame(fast, (1, 0, 0), slow, (0, 1, 0))
    print('Coordinate change:')
    print('%5.2f %5.2f %5.2f\n%5.2f %5.2f %5.2f\n%5.2f %5.2f %5.2f\n' % Rd.elems)

    fast = Rd * fast
    slow = Rd * slow

    qx, qy = panel.get_pixel_size()
    nx, ny = panel.get_image_size()
    distance = matrix.col(Rd * panel.get_origin()).dot(
        matrix.col(Rd * panel.get_normal()))
    org = Rd * (matrix.col(panel.get_origin()) -
                distance * matrix.col(panel.get_normal()))
    orgx = -org.dot(fast) / qx
    orgy = -org.dot(slow) / qy

    UB = Rd * matrix.sqr(experiment.crystal.get_A())
    real_space_ABC = UB.inverse().elems

    axis = Rd * experiment.goniometer.get_rotation_axis()
    beam = Rd * experiment.beam.get_s0()
    cell_fmt = '%9.3f %9.3f %9.3f %7.3f %7.3f %7.3f'
    axis_fmt = '%9.3f %9.3f %9.3f'

    fout.write('\n'.join([
        '!FORMAT=XDS_ASCII    MERGE=FALSE    FRIEDEL\'S_LAW=TRUE',
        '!Generated by dials.export',
        '!DATA_RANGE= %d %d' % image_range,
        '!ROTATION_AXIS= %9.6f %9.6f %9.6f' % axis.elems,
        '!OSCILLATION_RANGE= %f' % phi_range,
        '!STARTING_ANGLE= %f' % phi_start,
        '!STARTING_FRAME= %d' % image_range[0],
        '!SPACE_GROUP_NUMBER= %d' %
        experiment.crystal.get_space_group().type().number(),
        '!UNIT_CELL_CONSTANTS= %s' % (cell_fmt % unit_cell.parameters()),
        '!UNIT_CELL_A-AXIS= %s' % (axis_fmt % real_space_ABC[0:3]),
        '!UNIT_CELL_B-AXIS= %s' % (axis_fmt % real_space_ABC[3:6]),
        '!UNIT_CELL_C-AXIS= %s' % (axis_fmt % real_space_ABC[6:9]),
        '!X-RAY_WAVELENGTH= %f' % experiment.beam.get_wavelength(),
        '!INCIDENT_BEAM_DIRECTION= %f %f %f' % beam.elems,
        '!NX= %d NY= %d QX= %f QY= %f' % (nx, ny, qx, qy),
        '!ORGX= %9.2f ORGY= %9.2f' % (orgx, orgy),
        '!DETECTOR_DISTANCE= %8.3f' % distance,
        '!DIRECTION_OF_DETECTOR_X-AXIS= %9.5f %9.5f %9.5f' % fast.elems,
        '!DIRECTION_OF_DETECTOR_Y-AXIS= %9.5f %9.5f %9.5f' % slow.elems,
        '!VARIANCE_MODEL= %7.3e %7.3e' % var_model,
        '!NUMBER_OF_ITEMS_IN_EACH_DATA_RECORD=12', '!ITEM_H=1', '!ITEM_K=2',
        '!ITEM_L=3', '!ITEM_IOBS=4', '!ITEM_SIGMA(IOBS)=5', '!ITEM_XD=6',
        '!ITEM_YD=7', '!ITEM_ZD=8', '!ITEM_RLP=9', '!ITEM_PEAK=10',
        '!ITEM_CORR=11', '!ITEM_PSI=12', '!END_OF_HEADER', ''
    ]))

    # then write the data records

    s0 = Rd * matrix.col(experiment.beam.get_s0())

    for j in range(nref):
        x, y, z = integrated_data['xyzcal.px'][j]
        phi = phi_start + z * phi_range
        h, k, l = miller_index[j]
        X = (UB * (h, k, l)).rotate(axis, phi, deg=True)
        s = s0 + X
        g = s.cross(s0).normalize()
        f = (s - s0).normalize()

        # find component of beam perpendicular to f, e
        e = -(s + s0).normalize()
        if h == k and k == l:
            u = (h, -h, 0)
        else:
            u = (k - l, l - h, h - k)
        q = (matrix.col(u).transpose() *
             UB.inverse()).normalize().transpose().rotate(axis, phi, deg=True)

        psi = q.angle(g, deg=True)
        if q.dot(e) < 0:
            psi *= -1

        fout.write('%d %d %d %f %f %f %f %f %f %.1f %.1f %f\n' %
                   (h, k, l, I[j], sigI[j], x, y, z, scl[j], partiality[j],
                    prof_corr[j], psi))

    fout.write('!END_OF_DATA\n')
    fout.close()
    logger.info('Output %d reflections to %s' % (nref, hklout))
Example #32
0
    def __init__(
        self,
        reflections,
        experiments,
        nref_per_degree=None,
        max_sample_size=None,
        min_sample_size=0,
        close_to_spindle_cutoff=0.02,
        scan_margin=0.0,
        outlier_detector=None,
        weighting_strategy_override=None,
    ):

        if len(reflections) == 0:
            raise ValueError("Empty reflections table provided to ReflectionManager")

        # keep track of models
        self._experiments = experiments
        goniometers = [e.goniometer for e in self._experiments]
        self._axes = [
            matrix.col(g.get_rotation_axis()) if g else None for g in goniometers
        ]
        self._s0vecs = [matrix.col(e.beam.get_s0()) for e in self._experiments]

        # unset the refinement flags (creates flags field if needed)
        reflections.unset_flags(
            flex.size_t_range(len(reflections)),
            flex.reflection_table.flags.used_in_refinement,
        )

        # check that the observed beam vectors are stored: if not, compute them
        n_s1_set = set_obs_s1(reflections, experiments)
        if n_s1_set > 0:
            logger.debug("Set scattering vectors for %d reflections", n_s1_set)

        # keep track of the original indices of the reflections
        reflections["iobs"] = flex.size_t_range(len(reflections))

        # Check that the 'id' column is monotonically non-decreasing, i.e. the
        # table is already sorted by id; if not, sort by id and then by panel
        # within each id block. This is required by the C++ extension modules
        # to allow O(n log n) subselection of the values used in refinement.
        l_id = reflections["id"]
        id0 = l_id[0]
        for id_x in l_id[1:]:
            if id0 <= id_x:
                id0 = id_x
            else:
                reflections.sort("id")  # Ensuring the ref_table is sorted by id
                reflections.subsort(
                    "id", "panel"
                )  # Ensuring that within each sorted id block, sorting is next performed by panel
                break

        # set up the reflection inclusion criteria
        self._close_to_spindle_cutoff = close_to_spindle_cutoff  # close to spindle
        self._scan_margin = DEG2RAD * scan_margin  # close to the scan edge
        self._outlier_detector = outlier_detector  # for outlier rejection
        self._nref_per_degree = nref_per_degree  # random subsets
        self._max_sample_size = max_sample_size  # sample size ceiling
        self._min_sample_size = min_sample_size  # sample size floor

        # exclude reflections that fail some inclusion criteria
        refs_to_keep = self._id_refs_to_keep(reflections)
        self._accepted_refs_size = len(refs_to_keep)

        # set entering flags for all reflections
        reflections.calculate_entering_flags(self._experiments)

        # set observed frame numbers for all reflections if not already present
        calculate_frame_numbers(reflections, self._experiments)

        # reset all use flags
        self.reset_accepted_reflections(reflections)

        # put full list of indexed reflections aside and select only the reflections
        # that were not excluded to manage
        self._indexed = reflections
        self._reflections = reflections.select(refs_to_keep)

        # set exclusion flag for reflections that failed the tests
        refs_to_excl = flex.bool(len(self._indexed), True)
        refs_to_excl.set_selected(refs_to_keep, False)
        self._indexed.set_flags(
            refs_to_excl, self._indexed.flags.excluded_for_refinement
        )

        # set weights for all kept reflections
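        # (when no override is given, self._weighting_strategy is assumed to
        # be supplied elsewhere, e.g. as a class attribute or subclass default)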
        if weighting_strategy_override is not None:
            self._weighting_strategy = weighting_strategy_override
        self._weighting_strategy.calculate_weights(self._reflections)

        # not known until the manager is finalised
        self._sample_size = None
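
# Aside (an illustrative sketch, not part of the example): the monotonic check
# above, restated over a plain Python list; it returns True when the sort by
# id can be skipped:

def ids_already_sorted(ids):
    return all(a <= b for a, b in zip(ids, ids[1:]))

assert ids_already_sorted([0, 0, 1, 1, 2])
assert not ids_already_sorted([0, 1, 0])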
Example #33
0
def export_xds_ascii(integrated_data,
                     experiment_list,
                     params,
                     var_model=(1, 0)):
    """Export data from integrated_data corresponding to experiment_list to
    an XDS_ASCII.HKL formatted text file."""

    from dials.array_family import flex

    # for the moment assume (and assert) that we will convert data from exactly
    # one lattice...

    assert len(experiment_list) == 1

    # select reflections that are assigned to an experiment (i.e. non-negative id)
    integrated_data = integrated_data.select(integrated_data["id"] >= 0)
    assert max(integrated_data["id"]) == 0

    # export for xds_ascii should only be done for unscaled reflections, so
    # require that raw (summation or profile-fitted) intensities are present
    assert any(
        i in integrated_data
        for i in ("intensity.sum.value", "intensity.prf.value")
    )

    integrated_data = filter_reflection_table(
        integrated_data,
        intensity_choice=params.intensity,
        partiality_threshold=params.mtz.partiality_threshold,
        combine_partials=params.mtz.combine_partials,
        min_isigi=params.mtz.min_isigi,
        filter_ice_rings=params.mtz.filter_ice_rings,
        d_min=params.mtz.d_min,
    )

    # calculate the scl = lp/dqe correction for outputting but don't apply it as
    # it has already been applied in filter_reflection_table
    integrated_data, scl = FilteringReductionMethods.calculate_lp_qe_correction_and_filter(
        integrated_data)

    experiment = experiment_list[0]

    # sort data before output, by Miller index mapped to the asymmetric unit
    import copy

    from cctbx.miller import map_to_asu

    nref = len(integrated_data["miller_index"])
    indices = flex.size_t_range(nref)
    unique = copy.deepcopy(integrated_data["miller_index"])
    map_to_asu(experiment.crystal.get_space_group().type(), False, unique)

    perm = sorted(indices, key=lambda k: unique[k])
    integrated_data = integrated_data.select(flex.size_t(perm))

    from scitbx import matrix
    from rstbx.cftbx.coordinate_frame_helpers import align_reference_frame

    assert experiment.goniometer is not None

    unit_cell = experiment.crystal.get_unit_cell()

    assert experiment.scan is not None
    image_range = experiment.scan.get_image_range()
    phi_start, phi_range = experiment.scan.get_image_oscillation(
        image_range[0])

    # gather the required information for the reflection file

    nref = len(integrated_data["miller_index"])
    zdet = flex.double(integrated_data["xyzcal.px"].parts()[2])

    miller_index = integrated_data["miller_index"]

    # profile correlation
    if "profile.correlation" in integrated_data:
        prof_corr = 100.0 * integrated_data["profile.correlation"]
    else:
        prof_corr = flex.double(nref, 100.0)

    # partiality (as a percentage; default to fully recorded if absent)
    if "partiality" in integrated_data:
        partiality = 100 * integrated_data["partiality"]
    else:
        partiality = flex.double(nref, 100.0)

    if "intensity.sum.value" in integrated_data:
        I = integrated_data["intensity.sum.value"]
        V = integrated_data["intensity.sum.variance"]
        assert V.all_gt(0)
        V = var_model[0] * (V + var_model[1] * I * I)
        sigI = flex.sqrt(V)
    else:
        I = integrated_data["intensity.prf.value"]
        V = integrated_data["intensity.prf.variance"]
        assert V.all_gt(0)
        V = var_model[0] * (V + var_model[1] * I * I)
        sigI = flex.sqrt(V)

    fout = open(params.xds_ascii.hklout, "w")

    # first write the header - in the "standard" coordinate frame...

    panel = experiment.detector[0]
    fast = panel.get_fast_axis()
    slow = panel.get_slow_axis()
    Rd = align_reference_frame(fast, (1, 0, 0), slow, (0, 1, 0))
    print("Coordinate change:")
    print("%5.2f %5.2f %5.2f\n%5.2f %5.2f %5.2f\n%5.2f %5.2f %5.2f\n" %
          Rd.elems)

    fast = Rd * fast
    slow = Rd * slow

    qx, qy = panel.get_pixel_size()
    nx, ny = panel.get_image_size()
    distance = matrix.col(Rd * panel.get_origin()).dot(
        matrix.col(Rd * panel.get_normal()))
    org = Rd * (matrix.col(panel.get_origin()) -
                distance * matrix.col(panel.get_normal()))
    orgx = -org.dot(fast) / qx
    orgy = -org.dot(slow) / qy

    UB = Rd * matrix.sqr(experiment.crystal.get_A())
    real_space_ABC = UB.inverse().elems

    axis = Rd * experiment.goniometer.get_rotation_axis()
    beam = Rd * experiment.beam.get_s0()
    cell_fmt = "%9.3f %9.3f %9.3f %7.3f %7.3f %7.3f"
    axis_fmt = "%9.3f %9.3f %9.3f"

    fout.write("\n".join([
        "!FORMAT=XDS_ASCII    MERGE=FALSE    FRIEDEL'S_LAW=TRUE",
        "!Generated by dials.export",
        "!DATA_RANGE= %d %d" % image_range,
        "!ROTATION_AXIS= %9.6f %9.6f %9.6f" % axis.elems,
        "!OSCILLATION_RANGE= %f" % phi_range,
        "!STARTING_ANGLE= %f" % phi_start,
        "!STARTING_FRAME= %d" % image_range[0],
        "!SPACE_GROUP_NUMBER= %d" %
        experiment.crystal.get_space_group().type().number(),
        "!UNIT_CELL_CONSTANTS= %s" % (cell_fmt % unit_cell.parameters()),
        "!UNIT_CELL_A-AXIS= %s" % (axis_fmt % real_space_ABC[0:3]),
        "!UNIT_CELL_B-AXIS= %s" % (axis_fmt % real_space_ABC[3:6]),
        "!UNIT_CELL_C-AXIS= %s" % (axis_fmt % real_space_ABC[6:9]),
        "!X-RAY_WAVELENGTH= %f" % experiment.beam.get_wavelength(),
        "!INCIDENT_BEAM_DIRECTION= %f %f %f" % beam.elems,
        "!NX= %d NY= %d QX= %f QY= %f" % (nx, ny, qx, qy),
        "!ORGX= %9.2f ORGY= %9.2f" % (orgx, orgy),
        "!DETECTOR_DISTANCE= %8.3f" % distance,
        "!DIRECTION_OF_DETECTOR_X-AXIS= %9.5f %9.5f %9.5f" % fast.elems,
        "!DIRECTION_OF_DETECTOR_Y-AXIS= %9.5f %9.5f %9.5f" % slow.elems,
        "!VARIANCE_MODEL= %7.3e %7.3e" % var_model,
        "!NUMBER_OF_ITEMS_IN_EACH_DATA_RECORD=12",
        "!ITEM_H=1",
        "!ITEM_K=2",
        "!ITEM_L=3",
        "!ITEM_IOBS=4",
        "!ITEM_SIGMA(IOBS)=5",
        "!ITEM_XD=6",
        "!ITEM_YD=7",
        "!ITEM_ZD=8",
        "!ITEM_RLP=9",
        "!ITEM_PEAK=10",
        "!ITEM_CORR=11",
        "!ITEM_PSI=12",
        "!END_OF_HEADER",
        "",
    ]))

    # then write the data records

    s0 = Rd * matrix.col(experiment.beam.get_s0())

    for j in range(nref):
        x, y, z = integrated_data["xyzcal.px"][j]
        phi = phi_start + z * phi_range
        h, k, l = miller_index[j]
        X = (UB * (h, k, l)).rotate(axis, phi, deg=True)
        s = s0 + X
        g = s.cross(s0).normalize()
        f = (s - s0).normalize()

        # find component of beam perpendicular to f, e
        e = -(s + s0).normalize()
        if h == k and k == l:
            u = (h, -h, 0)
        else:
            u = (k - l, l - h, h - k)
        q = ((matrix.col(u).transpose() *
              UB.inverse()).normalize().transpose().rotate(axis, phi,
                                                           deg=True))

        psi = q.angle(g, deg=True)
        if q.dot(e) < 0:
            psi *= -1

        fout.write("%d %d %d %f %f %f %f %f %f %.1f %.1f %f\n" % (
            h,
            k,
            l,
            I[j],
            sigI[j],
            x,
            y,
            z,
            scl[j],
            partiality[j],
            prof_corr[j],
            psi,
        ))

    fout.write("!END_OF_DATA\n")
    fout.close()
    logger.info("Output %d reflections to %s" %
                (nref, params.xds_ascii.hklout))
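
# Aside (a hedged usage sketch, not part of the example): export_xds_ascii
# reads a PHIL-style params object; the attribute names below are taken from
# the function body, but the values and the SimpleNamespace mock are
# illustrative assumptions only.

from types import SimpleNamespace

mock_params = SimpleNamespace(
    intensity="profile",  # assumed value; passed through as intensity_choice
    mtz=SimpleNamespace(
        partiality_threshold=0.99,
        combine_partials=True,
        min_isigi=None,
        filter_ice_rings=False,
        d_min=None,
    ),
    xds_ascii=SimpleNamespace(hklout="DIALS.HKL"),
)
# export_xds_ascii(integrated_data, experiment_list, mock_params)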
Example #34
0
    def __call__(self, experiments):
        """
        Do the spot finding.

        :param experiments: The experiments to process
        :return: The observed spots
        """
        import six.moves.cPickle as pickle
        from dxtbx.format.image import ImageBool

        # Loop through all the experiments and get the unique imagesets
        imagesets = []
        for experiment in experiments:
            if experiment.imageset not in imagesets:
                imagesets.append(experiment.imageset)

        # Loop through all the imagesets and find the strong spots
        reflections = flex.reflection_table()

        for j, imageset in enumerate(imagesets):

            # Find the strong spots in the sequence
            logger.info("-" * 80)
            logger.info("Finding strong spots in imageset %d" % j)
            logger.info("-" * 80)
            logger.info("")
            table, hot_mask = self._find_spots_in_imageset(imageset)

            # Fix up the experiment IDs now
            table["id"] = flex.int(table.nrows(), -1)
            for i, experiment in enumerate(experiments):
                if experiment.imageset is not imageset:
                    continue
                if experiment.scan:
                    z0, z1 = experiment.scan.get_array_range()
                    z = table["xyzobs.px.value"].parts()[2]
                    table["id"].set_selected((z > z0) & (z < z1), i)
                    if experiment.identifier:
                        table.experiment_identifiers()[i] = experiment.identifier
                else:
                    table["id"] = flex.int(table.nrows(), j)
                    if experiment.identifier:
                        table.experiment_identifiers()[j] = experiment.identifier
            missed = table["id"] == -1
            assert missed.count(True) == 0, missed.count(True)

            reflections.extend(table)
            # Write a hot pixel mask
            if self.write_hot_mask:
                if not imageset.external_lookup.mask.data.empty():
                    for m1, m2 in zip(hot_mask, imageset.external_lookup.mask.data):
                        m1 &= m2.data()
                    imageset.external_lookup.mask.data = ImageBool(hot_mask)
                else:
                    imageset.external_lookup.mask.data = ImageBool(hot_mask)
                imageset.external_lookup.mask.filename = "%s_%d.pickle" % (
                    self.hot_mask_prefix,
                    j,  # one hot mask per imageset, so index by the imageset loop
                )

                # Write the hot mask
                with open(imageset.external_lookup.mask.filename, "wb") as outfile:
                    pickle.dump(hot_mask, outfile, protocol=pickle.HIGHEST_PROTOCOL)

        # Set the strong spot flag
        reflections.set_flags(
            flex.size_t_range(len(reflections)), reflections.flags.strong
        )

        # Check for overloads
        reflections.is_overloaded(experiments)

        # Return the reflections
        return reflections
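
# Aside (a hedged usage sketch): assuming `spotfinder` is an instance of the
# class defining __call__ above and `experiments` is an ExperimentList of
# imported data, spot finding reduces to a single call:
#
#     strong = spotfinder(experiments)
#     logger.info("Found %d strong spots", len(strong))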
Example #35
0
    def find_spots(self, experiments: ExperimentList) -> flex.reflection_table:
        """
        Do spotfinding for a set of experiments.

        Args:
            experiments: The experiment list to process

        Returns:
            A new reflection table of found reflections
        """
        # Loop through all the experiments and get the unique imagesets
        imagesets = []
        for experiment in experiments:
            if experiment.imageset not in imagesets:
                imagesets.append(experiment.imageset)

        # Loop through all the imagesets and find the strong spots
        reflections = flex.reflection_table()

        for j, imageset in enumerate(imagesets):

            # Find the strong spots in the sequence
            logger.info(
                "-" * 80 + "\nFinding strong spots in imageset %d\n" +
                "-" * 80, j)
            table, hot_mask = self._find_spots_in_imageset(imageset)

            # Fix up the experiment IDs now
            table["id"] = flex.int(table.nrows(), -1)
            for i, experiment in enumerate(experiments):
                if experiment.imageset is not imageset:
                    continue
                if not self.is_stills and experiment.scan:
                    z0, z1 = experiment.scan.get_array_range()
                    z = table["xyzobs.px.value"].parts()[2]
                    table["id"].set_selected((z > z0) & (z < z1), i)
                    if experiment.identifier:
                        table.experiment_identifiers(
                        )[i] = experiment.identifier
                else:
                    table["id"] = flex.int(table.nrows(), j)
                    if experiment.identifier:
                        table.experiment_identifiers(
                        )[j] = experiment.identifier
            missed = table["id"] == -1
            assert missed.count(
                True) == 0, "Failed to remap {} experiment IDs".format(
                    missed.count(True))

            reflections.extend(table)
            # Write a hot pixel mask
            if self.write_hot_mask:
                if not imageset.external_lookup.mask.data.empty():
                    for m1, m2 in zip(hot_mask,
                                      imageset.external_lookup.mask.data):
                        m1 &= m2.data()
                    imageset.external_lookup.mask.data = ImageBool(hot_mask)
                else:
                    imageset.external_lookup.mask.data = ImageBool(hot_mask)
                imageset.external_lookup.mask.filename = "%s_%d.pickle" % (
                    self.hot_mask_prefix,
                    j,  # one hot mask per imageset, so index by the imageset loop
                )

                # Write the hot mask
                with open(imageset.external_lookup.mask.filename,
                          "wb") as outfile:
                    pickle.dump(hot_mask,
                                outfile,
                                protocol=pickle.HIGHEST_PROTOCOL)

        # Set the strong spot flag
        reflections.set_flags(flex.size_t_range(len(reflections)),
                              reflections.flags.strong)

        # Check for overloads
        reflections.is_overloaded(experiments)

        # Return the reflections
        return reflections
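
# Aside: this variant differs from the previous example mainly in the typed
# signature and the is_stills attribute, which forces per-imageset id
# assignment even when a scan is present. A hedged usage sketch, assuming a
# configured `spotfinder` instance:
#
#     strong = spotfinder.find_spots(experiments)
#     # every returned reflection carries the strong flag set above
#     assert strong.get_flags(strong.flags.strong).count(True) == len(strong)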