Example #1
0
def detector_parallel_refiners(params, experiments, reflections):

    print("Refining detector at hierarchy_level=" + \
      str(params.refinement.parameterisation.detector.hierarchy_level), "\n")
    orig_detector = experiments.detectors()[0]
    try:
        h = orig_detector.hierarchy()
    except AttributeError:
        print("This detector does not have a hierarchy")
        raise

    # get the panel groups at the chosen level
    level = params.refinement.parameterisation.detector.hierarchy_level
    try:
        groups = get_panel_groups_at_depth(h, level)
    except AttributeError:
        print(
            "Cannot access the hierarchy at the depth level={0}".format(level))
        raise

    # collect the panel ids for each Panel within the groups
    panels = [p for p in orig_detector]
    panel_ids_by_group = [get_panel_ids_at_root(panels, g) for g in groups]

    print("The detector will be divided into", len(panel_ids_by_group), \
      "groups consisting of the following panels:")
    for i, g in enumerate(panel_ids_by_group):
        print("Group%02d:" % (i + 1), g)
    print()

    # now construct sub-detectors
    def recursive_add_child(d, parent, child):
        """ Creates either a panel group or a panel on the parent,
        and sets it up to match the child """
        if child.is_group():
            newchild = parent.add_group()
        else:
            newchild = parent.add_panel()
            newchild.set_image_size(child.get_image_size())
            newchild.set_trusted_range(child.get_trusted_range())
            newchild.set_pixel_size(child.get_pixel_size())
            newchild.set_px_mm_strategy(child.get_px_mm_strategy())

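        # the local d-matrix is the child's 3x3 frame matrix, stored row-major
        # as 9 elements whose columns are the fast axis, slow axis and origin
        # relative to the parent, so striding by 3 recovers those three columns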
        m = child.get_local_d_matrix()
        newchild.set_local_frame(m[0::3], m[1::3], m[2::3])
        newchild.set_name(child.get_name())
        if child.is_group():
            for c in child.children():
                recursive_add_child(d, newchild, c)

    from dxtbx.model import Detector
    sub_detectors = [Detector() for e in groups]
    for d, g in zip(sub_detectors, groups):
        d.hierarchy().set_name(g.get_name())
        d.hierarchy().set_frame(g.get_fast_axis(), g.get_slow_axis(),
                                g.get_origin())
        if g.is_group():
            for c in g.children():
                recursive_add_child(d, d.hierarchy(), c)
        else:
            # at the bottom of the hierarchy; note the new panel's frame will
            # be the identity matrix
            p = d.hierarchy().add_panel()
            p.set_image_size(g.get_image_size())
            p.set_trusted_range(g.get_trusted_range())
            p.set_pixel_size(g.get_pixel_size())
            p.set_px_mm_strategy(g.get_px_mm_strategy())
            p.set_name(g.get_name())

    # set experiment lists for each sub-detector
    sub_det_expts = [copy.deepcopy(experiments) for e in groups]
    for d, exp in zip(sub_detectors, sub_det_expts):
        exp.replace(exp.detectors()[0], d)

    # divide the reflections by sub-detector
    sub_reflections = []
    for pnls in panel_ids_by_group:
        isels = [(reflections['panel'] == pnl).iselection() for pnl in pnls]
        isel = flex.size_t()
        for s in isels:
            isel.extend(s)
        gp_refs = reflections.select(isel)
        # reset panel number to match the sub-detector
        for new_id, old_id in enumerate(pnls):
            sel = gp_refs['panel'] == old_id
            gp_refs['panel'].set_selected(sel, new_id)
        sub_reflections.append(gp_refs)

    # We wish to refine each whole sub-detector as a single group. Therefore
    # we must use hierarchy_level=0 for these jobs
    tmplevel = params.refinement.parameterisation.detector.hierarchy_level
    params.refinement.parameterisation.detector.hierarchy_level = 0

    # do refinements and collect the refined experiments
    def do_work(item):
        refs, exps = item

        if len(refs) < 20:
            print("Cannot refine detector",
                  exps[0].detector.hierarchy().get_name(),
                  "due to too few reflections (", len(refs), ")")
            return exps  # do not refine this detector element

        # Here use the specialised faster refiner
        refiner = StillsDetectorRefinerFactory.from_parameters_data_experiments(
            params, refs, exps)
        refiner.run()
        return refiner.get_experiments()

    refined_exps = easy_mp.parallel_map(
        func=do_work,
        iterable=list(zip(sub_reflections, sub_det_expts)),
        processes=params.mp.nproc,
        method=params.mp.method,
        asynchronous=True,
        preserve_exception_message=True,
    )

    # update the full detector
    for group, refined_exp in zip(groups, refined_exps):
        refined_det = refined_exp.detectors()[0]
        local_root = refined_det[0]
        f = local_root.get_fast_axis()
        s = local_root.get_slow_axis()
        o = local_root.get_origin()
        group.set_frame(f, s, o)  # propagates local frame changes

    # refine the full detector to get RMSDs per panel
    print()
    print("Refining full recombined detector")
    print("---------------------------------")
    experiments = detector_refiner(params, experiments, reflections)

    # reset hierarchy_level
    params.refinement.parameterisation.detector.hierarchy_level = tmplevel

    return experiments
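The "divide the reflections by sub-detector" step above leans on a few flex selection calls that are easy to misread. Below is a minimal, self-contained sketch of the same idiom, assuming a working DIALS installation; the table contents and the panel ids in pnls are invented purely for illustration.

from dials.array_family import flex

# a toy reflection table with only a 'panel' column
reflections = flex.reflection_table()
reflections['panel'] = flex.size_t([0, 1, 2, 1, 0, 2, 2])

# panel ids belonging to one hypothetical group
pnls = [1, 2]

# gather the row indices of reflections recorded on those panels
isel = flex.size_t()
for pnl in pnls:
    isel.extend((reflections['panel'] == pnl).iselection())
gp_refs = reflections.select(isel)

# renumber the panel column so it indexes the sub-detector (0, 1, ...)
for new_id, old_id in enumerate(pnls):
    gp_refs['panel'].set_selected(gp_refs['panel'] == old_id, new_id)

print(list(gp_refs['panel']))  # expected: [0, 0, 1, 1, 1]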
def detector_parallel_refiners(params, experiments, reflections):

  print "Refining detector at hierarchy_level=" + \
    str(params.refinement.parameterisation.detector.hierarchy_level), "\n"
  orig_detector = experiments.detectors()[0]
  try:
    h = orig_detector.hierarchy()
  except AttributeError:
    print "This detector does not have a hierarchy"
    raise

  # get the panel groups at the chosen level
  level = params.refinement.parameterisation.detector.hierarchy_level
  try:
    groups = get_panel_groups_at_depth(h, level)
  except AttributeError:
    print "Cannot access the hierarchy at the depth level={0}".format(level)
    raise

  # collect the panel ids for each Panel within the groups
  panels = [p for p in orig_detector]
  panel_ids_by_group = [get_panel_ids_at_root(panels, g) for g in groups]

  print "The detector will be divided into", len(panel_ids_by_group), \
    "groups consisting of the following panels:"
  for i, g in enumerate(panel_ids_by_group):
    print "Group%02d:" % (i+1), g
  print

  # now construct sub-detectors
  def recursive_add_child(d, parent, child):
    """ Creates either a panel group or a panel on the parent,
        and sets it up to match the child """
    if child.is_group():
      newchild = parent.add_group()
    else:
      newchild = parent.add_panel()
      newchild.set_image_size(child.get_image_size())
      newchild.set_trusted_range(child.get_trusted_range())
      newchild.set_pixel_size(child.get_pixel_size())
      newchild.set_px_mm_strategy(child.get_px_mm_strategy())

    m = child.get_local_d_matrix()
    newchild.set_local_frame(m[0::3],m[1::3],m[2::3])
    newchild.set_name(child.get_name())
    if child.is_group():
      for c in child.children():
        recursive_add_child(d, newchild, c)

  from dxtbx.model import Detector
  sub_detectors = [Detector() for e in groups]
  for d, g in zip(sub_detectors, groups):
    d.hierarchy().set_name(g.get_name())
    d.hierarchy().set_frame(g.get_fast_axis(),
                            g.get_slow_axis(),
                            g.get_origin())
    if g.is_group():
      for c in g.children():
        recursive_add_child(d, d.hierarchy(), c)
    else: # at the bottom of the hierarchy. Note the new panel's frame will be the identity matrix.
      p = d.hierarchy().add_panel()
      p.set_image_size(g.get_image_size())
      p.set_trusted_range(g.get_trusted_range())
      p.set_pixel_size(g.get_pixel_size())
      p.set_px_mm_strategy(g.get_px_mm_strategy())
      p.set_name(g.get_name())

  # set experiment lists for each sub-detector
  sub_det_expts = [copy.deepcopy(experiments) for e in groups]
  for d, exp in zip(sub_detectors, sub_det_expts):
    exp.replace(exp.detectors()[0], d)

  # divide the reflections by sub-detector
  sub_reflections = []
  for pnls in panel_ids_by_group:
    isels = [(reflections['panel'] == pnl).iselection() for pnl in pnls]
    isel = flex.size_t()
    for s in isels: isel.extend(s)
    gp_refs = reflections.select(isel)
    # reset panel number to match the sub-detector
    for new_id, old_id in enumerate(pnls):
      sel = gp_refs['panel'] == old_id
      gp_refs['panel'].set_selected(sel, new_id)
    sub_reflections.append(gp_refs)

  # We wish to refine each whole sub-detector as a single group. Therefore
  # we must use hierarchy_level=0 for these jobs
  tmplevel = params.refinement.parameterisation.detector.hierarchy_level
  params.refinement.parameterisation.detector.hierarchy_level=0

  # do refinements and collect the refined experiments
  def do_work(item):
    refs, exps = item

    if len(refs) < 20:
      print "Cannot refine detector", exps[0].detector.hierarchy().get_name(), "due to too few reflections (", len(refs), ")"
      return exps # do not refine this detector element

    # Here use the specialised faster refiner
    refiner = StillsDetectorRefinerFactory.from_parameters_data_experiments(
        params, refs, exps)
    refiner.run()
    return refiner.get_experiments()

  refined_exps = easy_mp.parallel_map(
    func = do_work,
    iterable = zip(sub_reflections, sub_det_expts),
    processes = params.mp.nproc,
    method = params.mp.method,
    asynchronous=True,
    preserve_exception_message=True)

  # update the full detector
  for group, refined_exp in zip(groups, refined_exps):
    refined_det = refined_exp.detectors()[0]
    local_root = refined_det[0]
    f = local_root.get_fast_axis()
    s = local_root.get_slow_axis()
    o = local_root.get_origin()
    group.set_frame(f, s, o) # propagates local frame changes

  # refine the full detector to get RMSDs per panel
  print
  print "Refining full recombined detector"
  print "---------------------------------"
  experiments = detector_refiner(params, experiments, reflections)

  # reset hierarchy_level
  params.refinement.parameterisation.detector.hierarchy_level=tmplevel

  return experiments
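Both versions of the function dispatch the per-group refinements through libtbx's easy_mp.parallel_map with the same handful of keyword arguments. The toy sketch below shows just that calling convention, assuming a cctbx/libtbx installation; the work function and the inputs are invented, whereas in the code above each item is a (reflections, experiments) pair and the function runs a refinement job.

from libtbx import easy_mp

def do_work(item):
    refs, exps = item
    return refs * exps  # stand-in for a refinement job


if __name__ == '__main__':
    items = list(zip([1, 2, 3], [10, 20, 30]))
    results = easy_mp.parallel_map(
        func=do_work,
        iterable=items,
        processes=2,
        method='multiprocessing',
        asynchronous=True,
        preserve_exception_message=True,
    )
    print(results)  # expected: [10, 40, 90]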
    def __init__(self, detector, experiment_ids=None, level=0):
        """Initialise the DetectorParameterisationHierarchical object

        Args:
            detector: A dxtbx Detector object to be parameterised.
            experiment_ids (list): The experiment IDs affected by this
                parameterisation. Defaults to None, which is replaced by [0].
            level (int): The level of the detector hierarchy at which panel
                groupings are determined; each group is treated as a separate
                rigid block.
        """

        if experiment_ids is None:
            experiment_ids = [0]

        try:
            h = detector.hierarchy()
        except AttributeError:
            print("This detector does not have a hierarchy")
            raise

        # list the panel groups at the chosen level
        try:
            self._groups = get_panel_groups_at_depth(h, level)
        except AttributeError:
            print("Cannot access the hierarchy at the depth level={}".format(
                level))
            raise

        # collect the panel ids for each Panel within the groups
        panels = list(detector)
        self._panel_ids_by_group = [
            get_panel_ids_at_root(panels, g) for g in self._groups
        ]

        p_list = []
        self._group_ids_by_parameter = []
        istate = []
        self._offsets = []
        self._dir1s = []
        self._dir2s = []

        # loop over the groups, collecting initial parameters and states
        for igp, pnl_ids in enumerate(self._panel_ids_by_group):

            panel_centres_in_lab_frame = []
            for i in pnl_ids:
                pnl = detector[i]
                im_size = pnl.get_image_size_mm()
                cntr = (matrix.col(pnl.get_origin()) +
                        0.5 * matrix.col(pnl.get_fast_axis()) * im_size[0] +
                        0.5 * matrix.col(pnl.get_slow_axis()) * im_size[1])
                panel_centres_in_lab_frame.append(cntr)

            # get some vectors we need from the group
            go = matrix.col(self._groups[igp].get_origin())
            d1 = matrix.col(self._groups[igp].get_fast_axis())
            d2 = matrix.col(self._groups[igp].get_slow_axis())
            dn = matrix.col(self._groups[igp].get_normal())

            # we choose the dorg vector for this group to terminate on the group's
            # frame, at a point that we consider close to the centre of the group of
            # panels. This point is defined by taking the 3D centroid of the panel
            # centres then projecting that point onto the group frame.
            centroid = reduce(
                lambda a, b: a + b,
                panel_centres_in_lab_frame) / len(panel_centres_in_lab_frame)
            try:
                gp_centroid = matrix.col(
                    self._groups[igp].get_bidirectional_ray_intersection(
                        centroid))
                dorg = go + gp_centroid[0] * d1 + gp_centroid[1] * d2
            except RuntimeError:
                # workaround for a group frame that passes through the origin
                dorg = matrix.col((0.0, 0.0, 0.0))

            # The offset between the end of the dorg vector and
            # each Panel origin is a coordinate matrix with elements in the basis d1,
            # d2, dn. We need also each Panel's plane directions dir1 and dir2 in
            # terms of d1, d2 and dn.
            offsets, dir1s, dir2s = [], [], []
            # FIXME these dot products would be more efficiently done using a change of
            # basis matrix instead
            for p in [detector[i] for i in pnl_ids]:
                offset = matrix.col(p.get_origin()) - dorg
                offsets.append(
                    matrix.col(
                        (offset.dot(d1), offset.dot(d2), offset.dot(dn))))
                dir1 = matrix.col(p.get_fast_axis())
                dir1_new_basis = matrix.col(
                    (dir1.dot(d1), dir1.dot(d2), dir1.dot(dn)))
                dir1s.append(dir1_new_basis)
                dir2 = matrix.col(p.get_slow_axis())
                dir2_new_basis = matrix.col(
                    (dir2.dot(d1), dir2.dot(d2), dir2.dot(dn)))
                dir2s.append(dir2_new_basis)

            # The offsets and directions in the d1, d2, dn basis are fixed
            # quantities, not dependent on parameter values. Keep these as separate
            # sub-lists for each group
            self._offsets.append(offsets)
            self._dir1s.append(dir1s)
            self._dir2s.append(dir2s)

            # Set up the initial state for this group. This is the basis d1, d2, dn,
            # plus the offset locating the origin of the initial group frame
            gp_offset = go - dorg  # lab frame basis
            # FIXME another set of dot products better done by a matrix multiplication
            gp_offset = matrix.col((gp_offset.dot(d1), gp_offset.dot(d2),
                                    gp_offset.dot(dn)))  # d1,d2,dn basis
            istate.append({
                "d1": d1,
                "d2": d2,
                "dn": dn,
                "gp_offset": gp_offset
            })

            # set up the parameters.
            # distance from lab origin to ref_panel plane along its normal,
            # in initial orientation
            distance = self._groups[igp].get_directed_distance()
            dist = Parameter(distance, dn, "length (mm)",
                             "Group{}Dist".format(igp + 1))

            # shift in the detector model plane to locate dorg, in initial
            # orientation
            shift = dorg - dn * distance
            shift1 = Parameter(shift.dot(d1), d1, "length (mm)",
                               "Group{}Shift1".format(igp + 1))
            shift2 = Parameter(shift.dot(d2), d2, "length (mm)",
                               "Group{}Shift2".format(igp + 1))

            # rotations of the plane through its origin about:
            # 1) axis normal to initial orientation
            # 2) d1 axis of initial orientation
            # 3) d2 axis of initial orientation
            tau1 = Parameter(0, dn, "angle (mrad)",
                             "Group{}Tau1".format(igp + 1))
            tau2 = Parameter(0, d1, "angle (mrad)",
                             "Group{}Tau2".format(igp + 1))
            tau3 = Parameter(0, d2, "angle (mrad)",
                             "Group{}Tau3".format(igp + 1))

            # extend the parameter list with those pertaining to this group
            p_list.extend([dist, shift1, shift2, tau1, tau2, tau3])
            self._group_ids_by_parameter.extend([igp] * 6)

        # set up the base class
        ModelParameterisation.__init__(
            self,
            detector,
            istate,
            p_list,
            experiment_ids=experiment_ids,
            is_multi_state=True,
        )

        # call compose to calculate all the derivatives
        self.compose()
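The constructor above carries two FIXME notes about replacing the per-panel dot products with a change of basis. The sketch below, assuming scitbx (shipped with cctbx) and using invented axis values, shows the equivalence: stacking d1, d2 and dn as the rows of a 3x3 matrix turns the three dot products into a single matrix-vector product.

from scitbx import matrix

d1 = matrix.col((1.0, 0.0, 0.0))  # group fast axis (illustrative values)
d2 = matrix.col((0.0, 1.0, 0.0))  # group slow axis
dn = d1.cross(d2)                 # group normal

# rows of B are d1, d2, dn; B * v gives v expressed in the d1, d2, dn basis
B = matrix.sqr(d1.elems + d2.elems + dn.elems)

offset = matrix.col((3.0, -2.0, 5.0))
by_dots = matrix.col((offset.dot(d1), offset.dot(d2), offset.dot(dn)))
by_matrix = B * offset
assert by_dots.elems == by_matrix.elems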
Example #4
0
  def __init__(self, detector, experiment_ids=None, level=0):
    """The additional 'level' argument selects which level of the detector
    hierarchy is chosen to determine panel groupings that are treated as
    separate rigid blocks."""
    if experiment_ids is None:
      experiment_ids = [0]

    try:
      h = detector.hierarchy()
    except AttributeError:
      print "This detector does not have a hierarchy"
      raise

    # list the panel groups at the chosen level
    try:
      self._groups = get_panel_groups_at_depth(h, level)
    except AttributeError:
      print "Cannot access the hierarchy at the depth level={0}".format(level)
      raise

    # collect the panel ids for each Panel within the groups
    panels = [p for p in detector]
    self._panel_ids_by_group = [get_panel_ids_at_root(panels, g) for g in self._groups]

    p_list = []
    self._group_ids_by_parameter = []
    istate = []
    self._offsets = []
    self._dir1s = []
    self._dir2s = []

    # loop over the groups, collecting initial parameters and states
    for igp, pnl_ids in enumerate(self._panel_ids_by_group):

      panel_centres_in_lab_frame = []
      for i in pnl_ids:
        pnl = detector[i]
        im_size = pnl.get_image_size_mm()
        cntr = matrix.col(pnl.get_origin()) + \
            0.5 * matrix.col(pnl.get_fast_axis()) * im_size[0] + \
            0.5 * matrix.col(pnl.get_slow_axis()) * im_size[1]
        panel_centres_in_lab_frame.append(cntr)

      # get some vectors we need from the group
      go = matrix.col(self._groups[igp].get_origin())
      d1 = matrix.col(self._groups[igp].get_fast_axis())
      d2 = matrix.col(self._groups[igp].get_slow_axis())
      dn = matrix.col(self._groups[igp].get_normal())

      # we choose the dorg vector for this group to terminate on the group's
      # frame, at a point that we consider close to the centre of the group of
      # panels. This point is defined by taking the 3D centroid of the panel
      # centres then projecting that point onto the group frame.
      centroid = reduce(lambda a,b: a+b, panel_centres_in_lab_frame) / len(
        panel_centres_in_lab_frame)
      try:
        gp_centroid = matrix.col(self._groups[igp].get_ray_intersection(centroid))
        dorg = go + gp_centroid[0] * d1 + gp_centroid[1] * d2
      except RuntimeError: # workaround for a group frame that passes through
        # the origin
        dorg = matrix.col((0., 0., 0.))

      # The offset between the end of the dorg vector and
      # each Panel origin is a coordinate matrix with elements in the basis d1,
      # d2, dn. We need also each Panel's plane directions dir1 and dir2 in
      # terms of d1, d2 and dn.
      offsets, dir1s, dir2s = [], [], []
      #FIXME these dot products would be more efficiently done using a change of
      # basis matrix instead
      for p in [detector[i] for i in pnl_ids]:
        offset = matrix.col(p.get_origin()) - dorg
        offsets.append(matrix.col((offset.dot(d1),
                                   offset.dot(d2),
                                   offset.dot(dn))))
        dir1 = matrix.col(p.get_fast_axis())
        dir1_new_basis = matrix.col((dir1.dot(d1),
                                     dir1.dot(d2),
                                     dir1.dot(dn)))
        dir1s.append(dir1_new_basis)
        dir2 = matrix.col(p.get_slow_axis())
        dir2_new_basis = matrix.col((dir2.dot(d1),
                                     dir2.dot(d2),
                                     dir2.dot(dn)))
        dir2s.append(dir2_new_basis)

      # The offsets and directions in the d1, d2, dn basis are fixed
      # quantities, not dependent on parameter values. Keep these as separate
      # sub-lists for each group
      self._offsets.append(offsets)
      self._dir1s.append(dir1s)
      self._dir2s.append(dir2s)

      # Set up the initial state for this group. This is the basis d1, d2, dn,
      # plus the offset locating the origin of the initial group frame
      gp_offset = go - dorg # lab frame basis
      #FIXME another set of dot products better done by a matrix multiplication
      gp_offset = matrix.col((gp_offset.dot(d1),
                              gp_offset.dot(d2),
                              gp_offset.dot(dn))) # d1,d2,dn basis
      istate.append({'d1':d1, 'd2':d2, 'dn':dn, 'gp_offset':gp_offset})

      # set up the parameters.
      # distance from lab origin to ref_panel plane along its normal,
      # in initial orientation
      distance = self._groups[igp].get_directed_distance()
      dist = Parameter(distance,
        dn, 'length (mm)', 'Group{0}Dist'.format(igp + 1))

      # shift in the detector model plane to locate dorg, in initial
      # orientation
      shift = dorg - dn * distance
      shift1 = Parameter(shift.dot(d1), d1,
        'length (mm)', 'Group{0}Shift1'.format(igp + 1))
      shift2 = Parameter(shift.dot(d2), d2,
        'length (mm)', 'Group{0}Shift2'.format(igp + 1))

      # rotations of the plane through its origin about:
      # 1) axis normal to initial orientation
      # 2) d1 axis of initial orientation
      # 3) d2 axis of initial orientation
      tau1 = Parameter(0, dn, 'angle (mrad)', 'Group{0}Tau1'.format(igp + 1))
      tau2 = Parameter(0, d1, 'angle (mrad)', 'Group{0}Tau2'.format(igp + 1))
      tau3 = Parameter(0, d2, 'angle (mrad)', 'Group{0}Tau3'.format(igp + 1))

      # extend the parameter list with those pertaining to this group
      p_list.extend([dist, shift1, shift2, tau1, tau2, tau3])
      self._group_ids_by_parameter.extend([igp] * 6)

    # set up the base class
    ModelParameterisation.__init__(self, detector, istate, p_list,
                                   experiment_ids=experiment_ids,
                                   is_multi_state=True)

    # call compose to calculate all the derivatives
    self.compose()

    return
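Finally, a rough usage sketch for the class these constructors belong to, DetectorParameterisationHierarchical. The import path and the get_param_names/get_param_vals accessors (inherited from ModelParameterisation) are assumptions based on the DIALS refinement code layout, as is the helper function name; treat this as a sketch rather than a verified recipe.

from dials.algorithms.refinement.parameterisation.detector_parameters import (
    DetectorParameterisationHierarchical,
)


def report_detector_parameters(experiments, level=1):
    """Print the parameter names and starting values for the detector of the
    first experiment, grouped at the requested hierarchy level (six parameters
    per group: Dist, Shift1, Shift2, Tau1, Tau2 and Tau3)."""
    detector = experiments.detectors()[0]
    det_param = DetectorParameterisationHierarchical(
        detector, experiment_ids=[0], level=level)
    names = det_param.get_param_names()
    vals = det_param.get_param_vals()
    for name, val in zip(names, vals):
        print(name, val)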