Example #1
def main():
  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  import libtbx.load_env

  usage = "%s [options] image_*.cbf" % (
    libtbx.env.dispatcher_name)

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    read_datablocks_from_images=True,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)
  datablocks = flatten_datablocks(params.input.datablock)

  if len(datablocks) == 0:
    parser.print_help()
    exit()

  datablock = datablocks[0]
  imageset = datablock.extract_imagesets()[0]
  stability_fft(imageset, params)
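
Note: these snippets assume a module-level phil_scope and help_message defined
in the enclosing script. A minimal sketch of that preamble (the parameter shown
is illustrative, not taken from the originals):

from libtbx.phil import parse

help_message = "Demonstration script built on dials.util.options."

phil_scope = parse("""
n_bins = 100
  .type = int
""")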
Example #2
  def run(self):
    '''Execute the script.'''
    from dials.array_family import flex
    from dials.util.options import flatten_datablocks
    from dials.util.options import flatten_reflections
    from time import time
    from dials.util import log
    from logging import info, debug
    from libtbx.utils import Sorry
    start_time = time()

    # Parse the command line
    params, options = self.parser.parse_args(show_diff_phil=False)

    # Configure the logging
    log.config(
      params.verbosity,
      info=params.output.log,
      debug=params.output.debug_log)

    from dials.util.version import dials_version
    info(dials_version())

    # Log the diff phil
    diff_phil = self.parser.diff_phil.as_str()
    if diff_phil != '':
      info('The following parameters have been modified:\n')
      info(diff_phil)

    # Ensure we have a data block
    datablocks = flatten_datablocks(params.input.datablock)
    reflections = flatten_reflections(params.input.reflections)
    if len(datablocks) == 0 and len(reflections) == 0:
      self.parser.print_help()
      return
    elif len(datablocks) != len(reflections):
      raise Sorry("Must have same number of datablocks and reflection tables")

    # Combine the datablocks and reflections
    datablock, reflections = combine(
      datablocks,
      reflections,
      params)

    # Save the reflections to file
    info('\n' + '-' * 80)
    reflections.as_pickle(params.output.reflections)
    info('Saved {0} reflections to {1}'.format(
        len(reflections), params.output.reflections))

    # Save the datablock
    from dxtbx.datablock import DataBlockDumper
    info('Saving datablocks to {0}'.format(
      params.output.datablock))
    dump = DataBlockDumper(datablock)
    dump.as_file(params.output.datablock)


    # Print the time
    info("Time Taken: %f" % (time() - start_time))
Example #3
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  from dials.array_family import flex
  import libtbx.load_env

  usage = "%s [options] image_*.cbf" % (
    libtbx.env.dispatcher_name)

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    read_datablocks_from_images=True,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)
  datablocks = flatten_datablocks(params.input.datablock)

  if len(datablocks) == 0:
    parser.print_help()
    exit()

  assert(len(datablocks) == 1)

  datablock = datablocks[0]
  imagesets = datablock.extract_imagesets()

  assert(len(imagesets) == 1)

  imageset = imagesets[0]

  images = imageset.indices()
  if params.frames:
    images = params.frames

  d_spacings = []
  intensities = []
  sigmas = []

  for indx in images:
    print 'For frame %d:' % indx
    d, I, sig = background(imageset, indx, n_bins=params.n_bins)

    print '%8s %8s %8s' % ('d', 'I', 'sig')
    for j in range(len(I)):
      print '%8.3f %8.3f %8.3f' % (d[j], I[j], sig[j])

    d_spacings.append(d)
    intensities.append(I)
    sigmas.append(sig)

  if params.plot:
    from matplotlib import pyplot
    fig = pyplot.figure()
    for d, I, sig in zip(d_spacings, intensities, sigmas):
      ds2 = 1/flex.pow2(d)
      pyplot.plot(ds2, I)

    pyplot.show()
Example #4
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks

  parser = OptionParser(
    read_datablocks=True,
    read_datablocks_from_images=True,
    phil=phil_scope,
    check_format=True)

  params, options = parser.parse_args(show_diff_phil=True)
  datablocks = flatten_datablocks(params.input.datablock)
  assert len(datablocks) == 1
  imagesets = datablocks[0].extract_imagesets()

  img_count = 0
  import time
  t0 = time.time()
  for imgset in imagesets:
    for i in range(len(imgset)):
      if params.data == 'raw':
        imgset.get_raw_data(i)
      else:
        imgset.get_corrected_data(i)
      img_count += 1
      print "Read %i images" %img_count
  t1 = time.time()
  t = t1 - t0
  print "Read %i images in %.2fs (%.1f images/s)" %(
    img_count, t, img_count/t)

  return
Example #5
    def run(self):
        from dials.util.command_line import Command
        from dials.util.options import flatten_datablocks

        # Parse the command line
        params, options = self.parser.parse_args(show_diff_phil=True)

        # Ensure we have a data block
        datablocks = flatten_datablocks(params.input.datablock)
        if len(datablocks) == 0:
            self.parser.print_help()
            return
        elif len(datablocks) != 1:
            raise RuntimeError("only 1 datablock can be processed at a time")

        if not params.rs_mapper.map_file:
            raise RuntimeError("Please specify output map file (map_file=)")
        else:
            self.map_file = params.rs_mapper.map_file

        self.datablocks = flatten_datablocks(params.input.datablock)
        if len(self.datablocks) == 0:
            self.parser.print_help()
            return

        self.reverse_phi = params.rs_mapper.reverse_phi
        self.grid_size = params.rs_mapper.grid_size
        self.max_resolution = params.rs_mapper.max_resolution

        self.grid = flex.double(flex.grid(self.grid_size, self.grid_size, self.grid_size), 0)
        self.cnts = flex.int(flex.grid(self.grid_size, self.grid_size, self.grid_size), 0)

        for datablock in self.datablocks:
            for imageset in datablock.extract_imagesets():
                self.process_imageset(imageset)

        recviewer.normalize_voxels(self.grid, self.cnts)
        uc = uctbx.unit_cell((self.grid_size, self.grid_size, self.grid_size, 90, 90, 90))
        ccp4_map.write_ccp4_map(
            self.map_file,
            uc,
            sgtbx.space_group("P1"),
            (0, 0, 0),
            self.grid.all(),
            self.grid,
            flex.std_string(["cctbx.miller.fft_map"]),
        )
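
Note: this example depends on module-level imports that are not shown. Under
the standard cctbx layout these would presumably be:

from scitbx.array_family import flex
from cctbx import uctbx, sgtbx
from iotbx import ccp4_map

(recviewer, which provides normalize_voxels, also comes from the enclosing
module and is left unspecified here.)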
Example #6
def run(args):
  import libtbx.load_env
  from libtbx.utils import Sorry
  from dials.util import log
  from logging import info
  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  from dials.util.options import flatten_reflections
  usage = "%s [options] datablock.json strong.pickle" %libtbx.env.dispatcher_name

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    read_reflections=True,
    check_format=False,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=False)
  datablocks = flatten_datablocks(params.input.datablock)
  reflections = flatten_reflections(params.input.reflections)

  if len(datablocks) == 0 or len(reflections) == 0:
    parser.print_help()
    exit(0)

  # Configure the logging
  log.config(
    info=params.output.log,
    debug=params.output.debug_log)

  # Log the diff phil
  diff_phil = parser.diff_phil.as_str()
  if diff_phil != '':
    info('The following parameters have been modified:\n')
    info(diff_phil)

  imagesets = []
  for datablock in datablocks:
    imagesets.extend(datablock.extract_imagesets())

  assert len(imagesets) > 0
  assert len(reflections) == len(imagesets)

  if params.scan_range is not None and len(params.scan_range) > 0:
    reflections = [
      filter_reflections_by_scan_range(refl, params.scan_range)
      for refl in reflections]

  dps_params = dps_phil_scope.extract()
  # for development, we want an exhaustive plot of beam probability map:
  dps_params.indexing.plot_search_scope = params.plot_search_scope
  dps_params.indexing.mm_search_scope = params.mm_search_scope

  new_detector, new_beam = discover_better_experimental_model(
    imagesets, reflections, params, dps_params, nproc=params.nproc,
    wide_search_binning=params.wide_search_binning)
  for imageset in imagesets:
    imageset.set_detector(new_detector)
    imageset.set_beam(new_beam)
  from dxtbx.serialize import dump
  dump.datablock(datablock, params.output.datablock)
Example #7
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  from dials.util.options import flatten_experiments
  import libtbx.load_env

  usage = "%s [options] datablock.json" %(
    libtbx.env.dispatcher_name)

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    read_experiments=True,
    check_format=True,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)
  datablocks = flatten_datablocks(params.input.datablock)
  experiments = flatten_experiments(params.input.experiments)

  if (len(datablocks) == 0 and len(experiments) == 0):
    parser.print_help()
    exit(0)

  if len(datablocks) == 0 and len(experiments) > 0:
    imagesets = experiments.imagesets()
  else:
    imagesets = []
    for datablock in datablocks:
      imagesets.extend(datablock.extract_imagesets())

  assert len(imagesets) == 1
  imageset = imagesets[0]
  gonio = imageset.get_goniometer()
  if not params.detector_distance:
    detector = imageset.get_detector()
    if len(detector) > 1:
      params.detector_distance = detector.hierarchy().get_directed_distance()
    else:
      params.detector_distance = detector[0].get_directed_distance()
  if params.angle:
    assert len(params.angle) == len(gonio.get_angles())
  else:
    for angle in gonio.get_angles():
      params.angle.append(angle)

  import wxtbx.app
  a = wxtbx.app.CCTBXApp(0)
  a.settings = params
  f = ExperimentViewer(
    None, -1, "Experiment viewer", size=(1024,768))
  f.load_imageset(imageset)
  f.Show()
  a.SetTopWindow(f)
  #a.Bind(wx.EVT_WINDOW_DESTROY, lambda evt: tb_icon.Destroy(), f)
  a.MainLoop()
Example #8
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  from dials.util.options import flatten_experiments
  from dials.util.options import flatten_reflections
  from libtbx.utils import Sorry

  parser = OptionParser(
    phil=master_phil,
    read_datablocks=True,
    read_experiments=True,
    read_reflections=True,
    check_format=False)

  params, options = parser.parse_args(show_diff_phil=True)
  datablocks = flatten_datablocks(params.input.datablock)
  experiments = flatten_experiments(params.input.experiments)
  reflections = flatten_reflections(params.input.reflections)[0]
  if len(params.input.reflections) == 2:
    reflections2 = flatten_reflections(params.input.reflections)[1]
  else:
    reflections2 = None

  # find the reflections in the second set that DO NOT match those in the
  # first set
  if reflections2 is not None:
    mask, _ = reflections2.match_with_reference(reflections)
    reflections2 = reflections2.select(~mask)
    print "{0} reflections from the second set do not match the first". \
      format(len(reflections2))
  #reflections2 = reflections2.select(reflections2["miller_index"] == (-7,2,-25))

  if len(datablocks) == 0:
    if len(experiments) > 0:
      imagesets = experiments.imagesets()
    else:
      parser.print_help()
      return
  elif len(datablocks) > 1:
    raise Sorry("Only one DataBlock can be processed at a time")
  else:
    imagesets = datablocks[0].extract_imagesets()

  if len(imagesets) > 1:
    raise Sorry("Only one ImageSet can be processed at a time")
  imageset = imagesets[0]

  import wxtbx.app
  a = wxtbx.app.CCTBXApp(0)
  a.settings = params
  f = PredRelpViewer(
    None, -1, "Prediction reciprocal lattice viewer", size=(1024,768))
  f.load_reflections2(reflections2)
  f.load_models(imageset, reflections)
  f.Show()
  a.SetTopWindow(f)
  #a.Bind(wx.EVT_WINDOW_DESTROY, lambda evt: tb_icon.Destroy(), f)
  a.MainLoop()
Example #9
def run(args):
  import libtbx.load_env
  from libtbx.utils import Sorry
  from dials.util import log
  from logging import info
  import cPickle as pickle
  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  from dials.util.options import flatten_reflections
  usage = "%s [options] datablock.json strong.pickle" % \
    libtbx.env.dispatcher_name

  # Create the option parser
  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_reflections=True,
    read_datablocks=True,
    check_format=False,
    epilog=help_message)

  # Get the parameters
  params, options = parser.parse_args(show_diff_phil=False)

  # Configure the log
  log.config(
    params.verbosity,
    info='dials.find_hot_pixels.log',
    debug='dials.find_hot_pixels.debug.log')

  # Log the diff phil
  diff_phil = parser.diff_phil.as_str()
  if diff_phil != '':
    info('The following parameters have been modified:\n')
    info(diff_phil)

  datablocks = flatten_datablocks(params.input.datablock)
  reflections = flatten_reflections(params.input.reflections)

  if len(datablocks) == 0 and len(reflections) == 0:
    parser.print_help()
    exit(0)

  if len(datablocks) == 0:
    raise Sorry("No datablocks found in input")
  elif len(datablocks) > 1:
    raise Sorry("Only one DataBlock can be processed at a time")
  else:
    imagesets = datablocks[0].extract_imagesets()
  if len(reflections) == 0:
    raise Sorry("No reflection lists found in input")
  if len(reflections) > 1:
    raise Sorry("Multiple reflections lists provided in input")

  assert(len(reflections) == 1)
  reflections = reflections[0]

  mask = hot_pixel_mask(imagesets[0], reflections)
  with open(params.output.mask, 'wb') as fh:
    pickle.dump(mask, fh, pickle.HIGHEST_PROTOCOL)

  print 'Wrote hot pixel mask to %s' % params.output.mask
  return
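
To load the mask back elsewhere, a standard pickle round-trip works (the
filename here is illustrative):

import cPickle as pickle
with open('hot_mask.pickle', 'rb') as fh:  # illustrative filename
  mask = pickle.load(fh)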
Example #10
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  from dials.util.options import flatten_experiments
  from dials.util.options import flatten_reflections
  import libtbx.load_env

  usage = "%s [options] datablock.json reflections.pickle" %(
    libtbx.env.dispatcher_name)

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    read_experiments=True,
    read_reflections=True,
    check_format=False,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)
  datablocks = flatten_datablocks(params.input.datablock)
  experiments = flatten_experiments(params.input.experiments)
  reflections = flatten_reflections(params.input.reflections)

  if (len(datablocks) == 0 and len(experiments) == 0) or len(reflections) == 0:
    parser.print_help()
    exit(0)

  if len(datablocks) == 0 and len(experiments) > 0:
    imagesets = experiments.imagesets()
  else:
    imagesets = []
    for datablock in datablocks:
      imagesets.extend(datablock.extract_imagesets())

  if len(reflections) > 1:
    assert len(reflections) == len(imagesets)
    from scitbx.array_family import flex
    for i in range(len(reflections)):
      reflections[i]['imageset_id'] = flex.int(len(reflections[i]), i)
      if i > 0:
        reflections[0].extend(reflections[i])

  reflections = reflections[0]

  import wxtbx.app
  a = wxtbx.app.CCTBXApp(0)
  a.settings = params
  f = ReciprocalLatticeViewer(
    None, -1, "Reflection data viewer", size=(1024,768))
  f.load_models(imagesets, reflections)
  f.Show()
  a.SetTopWindow(f)
  #a.Bind(wx.EVT_WINDOW_DESTROY, lambda evt: tb_icon.Destroy(), f)
  a.MainLoop()
Example #11
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  from dials.util.options import flatten_experiments
  from dials.util.options import flatten_reflections
  from dials.util import log
  import libtbx.load_env

  usage = "%s [options] datablock.json reflections.pickle" %(
    libtbx.env.dispatcher_name)

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    read_experiments=True,
    read_reflections=True,
    check_format=False,
    epilog=help_message)

  params, options = parser.parse_args()
  datablocks = flatten_datablocks(params.input.datablock)
  experiments = flatten_experiments(params.input.experiments)
  reflections = flatten_reflections(params.input.reflections)

  if (len(datablocks) == 0 and len(experiments) == 0) or len(reflections) == 0:
    parser.print_help()
    exit(0)

  ## Configure the logging
  #log.config(info='dials.rl_png.log')

  # Log the diff phil
  diff_phil = parser.diff_phil.as_str()
  if diff_phil != '':
    logger.info('The following parameters have been modified:\n')
    logger.info(diff_phil)

  reflections = reflections[0]

  if len(datablocks) == 0 and len(experiments) > 0:
    imagesets = experiments.imagesets()
  else:
    imagesets = []
    for datablock in datablocks:
      imagesets.extend(datablock.extract_imagesets())

  f = ReciprocalLatticeJson(settings=params)
  f.load_models(imagesets, reflections)
  f.as_json(filename=params.output.json, compact=params.output.compact)
  print
Example #12
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  from dials.util.options import flatten_experiments
  import libtbx.load_env

  usage = "%s [options] datablock.json | experiments.json" %(
    libtbx.env.dispatcher_name)

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    read_experiments=True,
    check_format=False,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)
  experiments = flatten_experiments(params.input.experiments)
  datablocks = flatten_datablocks(params.input.datablock)

  if len(experiments) == 0 and len(datablocks) == 0:
    parser.print_help()
    exit(0)

  from dials.command_line.dials_import import ManualGeometryUpdater
  update_geometry = ManualGeometryUpdater(params)


  if len(experiments):
    imagesets = experiments.imagesets()

  elif len(datablocks):

    assert len(datablocks) == 1
    imagesets = datablocks[0].extract_imagesets()

  for imageset in imagesets:
    imageset_new = update_geometry(imageset)
    imageset.set_detector(imageset_new.get_detector())
    imageset.set_beam(imageset_new.get_beam())
    imageset.set_goniometer(imageset_new.get_goniometer())
    imageset.set_scan(imageset_new.get_scan())

  from dxtbx.serialize import dump
  if len(experiments):
    print "Saving modified experiments to %s" %params.output.experiments
    dump.experiment_list(experiments, params.output.experiments)
  elif len(datablocks):
    raise NotImplementedError("saving modified datablocks is not implemented")
Example #13
    def run(self):
        params, options = self.parser.parse_args(show_diff_phil=True)
        datablocks = flatten_datablocks(params.input.datablock)
        experiments = flatten_experiments(params.input.experiments)
        reflections = flatten_reflections(params.input.reflections)
        if len(reflections) > 0:
            reflections = reflections[0]
        else:
            reflections = None

        all_detectors = []
        for db in datablocks:
            all_detectors.extend(db.unique_detectors())

        all_detectors.extend(experiments.detectors())
        display_detectors(all_detectors[: min(len(all_detectors), 10)], reflections=reflections)
Example #14
  def run(self):
    ''' Parse the options. '''
    from dxtbx.datablock import DataBlockFactory
    from dxtbx.datablock import DataBlockTemplateImporter
    from dials.util.options import flatten_datablocks
    from dials.util import log
    from logging import info, debug
    import cPickle as pickle
    from libtbx.utils import Sorry

    # Parse the command line arguments
    params, options = self.parser.parse_args(show_diff_phil=False)
    datablocks = flatten_datablocks(params.input.datablock)

    # Configure logging
    log.config(
      params.verbosity,
      info=params.output.log,
      debug=params.output.debug_log)
    from dials.util.version import dials_version
    info(dials_version())

    # Log the diff phil
    diff_phil = self.parser.diff_phil.as_str()
    if diff_phil != '':
      info('The following parameters have been modified:\n')
      info(diff_phil)

    # Load reference geometry
    reference_detector = None
    reference_beam = None
    if params.input.reference_geometry is not None:
      from dxtbx.serialize import load
      try:
        experiments = load.experiment_list(
          params.input.reference_geometry, check_format=False)
        assert len(experiments.detectors()) == 1
        assert len(experiments.beams()) == 1
        reference_detector = experiments.detectors()[0]
        reference_beam = experiments.beams()[0]
      except Exception:
        datablock = load.datablock(params.input.reference_geometry)
        assert len(datablock) == 1
        imageset = datablock[0].extract_imagesets()[0]
        reference_detector = imageset.get_detector()
        reference_beam = imageset.get_beam()
Example #15
def run(args):
  import libtbx.load_env
  usage = """\
%s datablock.json reflections.pickle [options]""" %libtbx.env.dispatcher_name
  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  from dials.util.options import flatten_experiments
  from dials.util.options import flatten_reflections
  from scitbx.array_family import flex
  from scitbx import matrix
  from libtbx.phil import command_line
  from libtbx.utils import Sorry
  parser = OptionParser(
    usage=usage,
    phil=master_phil_scope,
    read_datablocks=True,
    read_experiments=True,
    read_reflections=True,
    check_format=False)

  params, options = parser.parse_args(show_diff_phil=True)
  datablocks = flatten_datablocks(params.input.datablock)
  reflections = flatten_reflections(params.input.reflections)
  experiments = flatten_experiments(params.input.experiments)
  if len(datablocks) == 1:
    imageset = datablocks[0].extract_imagesets()[0]
  elif len(datablocks) > 1:
    raise Sorry("Only one DataBlock can be processed at a time")
  elif len(experiments.imagesets()) > 0:
    imageset = experiments.imagesets()[0]
    imageset.set_detector(experiments[0].detector)
    imageset.set_beam(experiments[0].beam)
    imageset.set_goniometer(experiments[0].goniometer)
  else:
    parser.print_help()
    return

  detector = imageset.get_detector()
  scan = imageset.get_scan()

  panel_origin_shifts = {0: (0,0,0)}
  try:
    hierarchy = detector.hierarchy()
  except AttributeError:
    hierarchy = None
Example #16
  def __call__(self):
    '''
    Import the datablocks

    '''
    from dxtbx.datablock import DataBlockTemplateImporter
    from dxtbx.datablock import DataBlockFactory
    from dials.util.options import flatten_datablocks
    from libtbx.utils import Sorry

    # Get the datablocks
    datablocks = flatten_datablocks(self.params.input.datablock)

    # Check we have some filenames
    if len(datablocks) == 0:

      format_kwargs = {
        'dynamic_shadowing' : self.params.format.dynamic_shadowing
      }

      # Check if a template has been set and print help if not, otherwise try to
      # import the images based on the template input
      if len(self.params.input.template) > 0:
        importer = DataBlockTemplateImporter(
          self.params.input.template,
          max(self.params.verbosity-1, 0),
          format_kwargs=format_kwargs)
        datablocks = importer.datablocks
        if len(datablocks) == 0:
          raise Sorry('No datablocks found matching template %s' % self.params.input.template)
      elif len(self.params.input.directory) > 0:
        datablocks = DataBlockFactory.from_filenames(
          self.params.input.directory,
          max(self.params.verbosity-1, 0),
          format_kwargs=format_kwargs)
        if len(datablocks) == 0:
          raise Sorry('No datablocks found in directories %s' % self.params.input.directory)
      else:
        raise Sorry('No datablocks found')
    if len(datablocks) > 1:
      raise Sorry("More than 1 datablock found")

    # Return the datablocks
    return datablocks[0]
Example #17
def run(args):
  import libtbx.load_env
  from libtbx.utils import Sorry
  usage = "%s [options] datablock.json" %libtbx.env.dispatcher_name

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    check_format=True,
    read_datablocks_from_images=True,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=False)

  ## Configure the logging
  #log.config(
    #params.verbosity, info='dials.estimate_gain.log', debug='dials.estimate_gain.debug.log')

  # Log the diff phil
  diff_phil = parser.diff_phil.as_str()
  if diff_phil != '':
    print 'The following parameters have been modified:\n'
    print diff_phil

  datablocks = flatten_datablocks(params.input.datablock)

  if len(datablocks) == 0:
    parser.print_help()
    return
  elif len(datablocks) > 1:
    raise Sorry("Only one DataBlock can be processed at a time")
  else:
    imagesets = []
    for datablock in datablocks:
      imagesets.extend(datablock.extract_imagesets())

  assert len(imagesets) == 1
  imageset = imagesets[0]
  estimate_gain(imageset, params.kernel_size, params.output.gain_map)

  return
Example #18
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  import libtbx.load_env

  usage = "%s [options] datablock.json" % (
    libtbx.env.dispatcher_name)

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    read_datablocks_from_images=True,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)

  datablocks = flatten_datablocks(params.input.datablock)

  if len(datablocks) == 0:
    parser.print_help()
    exit()

  assert(len(datablocks) == 1)

  datablock = datablocks[0]
  imagesets = datablock.extract_imagesets()

  assert(len(imagesets) == 1)

  imageset = imagesets[0]

  images = params.image

  for j, img_a in enumerate(images[:-1]):
    for img_b in images[j+1:]:
      a = imageset.get_raw_data(img_a)[0]
      b = imageset.get_raw_data(img_b)[0]
      n, cc = image_correlation(a, b)
      print '%5d %5d %7d %.4f' % (img_a, img_b, n, cc)
Example #19
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  from dials.util.options import flatten_experiments
  from dials.util.options import flatten_reflections
  import libtbx.load_env

  usage = "%s [options] datablock.json reflections.pickle" %(
    libtbx.env.dispatcher_name)

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    read_experiments=True,
    read_reflections=True,
    check_format=False,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)
  datablocks = flatten_datablocks(params.input.datablock)
  experiments = flatten_experiments(params.input.experiments)
  reflections = flatten_reflections(params.input.reflections)

  if (len(datablocks) == 0 and len(experiments) == 0) or len(reflections) == 0:
    parser.print_help()
    exit(0)

  reflections = reflections[0]

  if len(datablocks) == 0 and len(experiments) > 0:
    imagesets = experiments.imagesets()
  else:
    imagesets = []
    for datablock in datablocks:
      imagesets.extend(datablock.extract_imagesets())

  spot_resolution_shells(imagesets, reflections, params)
Example #20
  def run(self):
    ''' Run the script. '''
    from dials.util.options import flatten_datablocks
    from dxtbx.datablock import DataBlockDumper
    from libtbx.utils import Sorry
    import cPickle as pickle

    # Parse the command line arguments
    params, options = self.parser.parse_args(show_diff_phil=True)
    datablocks = flatten_datablocks(params.input.datablock)

    # Check number of args
    if len(datablocks) == 0:
      self.parser.print_help()
      return

    # Check the mask file is given
    if params.input.mask is None:
      self.parser.print_help()
      return

    # Check number of datablocks
    if len(datablocks) != 1:
      raise Sorry('exactly 1 datablock must be specified')

    # Get the imageset
    datablock = datablocks[0]
    imagesets = datablock.extract_imagesets()
    if len(imagesets) != 1:
      raise Sorry('datablock must contain exactly 1 imageset')
    imageset = imagesets[0]

    # Set the lookup
    imageset.external_lookup.mask.filename = params.input.mask

    # Dump the datablock
    print "Writing datablock to %s" % params.output.datablock
    dump = DataBlockDumper(datablock)
    dump.as_json(filename=params.output.datablock)
Example #21
  def run(self):
    # load at least two detectors from the command line
    params, options = self.parser.parse_args(show_diff_phil=True)
    datablocks  = flatten_datablocks(params.input.datablock)
    experiments = flatten_experiments(params.input.experiments)

    # collect all detectors found
    all_detectors = []
    for db in datablocks:
      all_detectors.extend(db.unique_detectors())

    all_detectors.extend(experiments.detectors())

    assert len(all_detectors) >= 2

    a = all_detectors[0]
    b = all_detectors[1]

    level = 0
    pga = a.hierarchy()
    pgb = b.hierarchy()

    while True:
      # starting at the top of the hierarchy, show diffs in local origins at each level
      print "Level", level
      oa = col(pga.get_local_origin())
      ob = col(pgb.get_local_origin())

      print "  Detector a", oa.elems
      print "  Detector b", ob.elems
      print "  Diffs", (ob-oa).elems

      if hasattr(pga, 'children'):
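        # still a PanelGroup: descend into the first child on each side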
        pga = pga[0]
        pgb = pgb[0]
        level += 1
      else:
        break
Example #22
  def open_viewer(self, filename):
    import wx
    from dials.util.options import OptionParser
    from dials.util.options import flatten_datablocks
    import libtbx.load_env
    parser = OptionParser(
      phil=phil_scope,
      read_datablocks=True,
      read_experiments=True,
      read_reflections=True,
      read_datablocks_from_images=True)
    params, options = parser.parse_args(args=[filename])
    datablocks = flatten_datablocks(params.input.datablock)
    imagesets = datablocks[0].extract_imagesets()
    self.runner = iv.Script(
      params=params.image_viewer,
      reflections=[],
      imagesets=imagesets,
      crystals=None)
    # Run the script

    self.thread = threading.Thread(target=self.runner)
    self.thread.start()
Example #23
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  import libtbx.load_env

  usage = "%s [options] datablock.json reference=reference_datablock.json" %(
    libtbx.env.dispatcher_name)

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    check_format=False,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)
  datablocks = flatten_datablocks(params.input.datablock)

  if len(datablocks) == 0:
    parser.print_help()
    exit()

  # Load reference geometry
  reference_detector = None
  if params.input.reference is not None:
    from dxtbx.serialize import load
    try:
      reference_experiments = load.experiment_list(
        params.input.reference, check_format=False)
      assert len(reference_experiments.detectors()) == 1
      reference_detector = reference_experiments.detectors()[0]
    except Exception:
      reference_datablocks = load.datablock(params.input.reference)
      assert len(reference_datablocks) == 1
      imageset = reference_datablocks[0].extract_imagesets()[0]
      reference_detector = imageset.get_detector()
Example #24
def run():
  import sys
  import libtbx.load_env

  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  from libtbx.utils import Sorry

  usage = "%s [options] image_*.cbf" %libtbx.env.dispatcher_name

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    read_datablocks_from_images=True,
    epilog=help_message
  )

  params, options, args = parser.parse_args(
    show_diff_phil=True, return_unhandled=True)

  n_images = params.merge_n_images
  out_prefix = params.output.image_prefix
  datablocks = flatten_datablocks(params.input.datablock)

  if len(datablocks) == 0:
    parser.print_help()
    return

  if len(datablocks) > 1:
    raise Sorry("Only one DataBlock can be processed at a time")
  else:
    imagesets = datablocks[0].extract_imagesets()
    assert len(imagesets) == 1
    imageset = imagesets[0]

  merge_cbf(imageset, n_images, out_prefix=out_prefix)
Example #25
    def run(self):
        """Parse the options."""
        from dials.util.options import flatten_experiments, flatten_datablocks

        # Parse the command line arguments
        params, options = self.parser.parse_args(show_diff_phil=True)
        experiments = flatten_experiments(params.input.experiments)
        datablocks = flatten_datablocks(params.input.datablock)

        # Verify inputs
        if len(experiments) == 0 and len(datablocks) == 0:
            print("No experiments found")
            return

        if len(experiments) > 0 and len(datablocks) > 0:
            print("Please analyze only one datablock or experiment list")
            return

        # Find the detector of interest
        if len(experiments) > 0:
            print(
                "Found %d experiments, doing analysis on experiment 0"
                % len(experiments)
            )
            detector = experiments[0].detector
        else:
            print(
                "Found %d datablocks, doing analysis on datablock 0" % len(datablocks)
            )
            detector = datablocks[0].unique_detectors()[0]

        # Build a list of all 2x1 sensors in the detector
        sensors = []

        def recursive_find_sensors(group):
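            # a 2x1 sensor is a PanelGroup with exactly two Panel children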
            if len(group) == 2:
                # Verify the children of this group are Panels, not PanelGroups
                assert [not hasattr(child, "children") for child in group].count(
                    False
                ) == 0
                sensors.append(group)

            else:
                for child in group:
                    recursive_find_sensors(child)

        recursive_find_sensors(detector.hierarchy())

        print("Found %d 2x1 sensors" % len(sensors))

        pixel_gaps = flex.double()
        bottom_gaps = flex.double()
        angles = flex.double()
        sensor_angles = flex.double()

        for sensor in sensors:
            a = sensor[0]
            b = sensor[1]

            # Get initial vectors
            center_a = col(a.get_local_origin())
            center_b = col(b.get_local_origin())

            pixel_size_a = a.get_pixel_size()[0]
            pixel_size_b = b.get_pixel_size()[0]
            assert pixel_size_a == pixel_size_b
            pixel_size = pixel_size_a

            width_a = a.get_image_size()[0]
            width_b = b.get_image_size()[0]
            assert width_a == width_b
            width = width_a * pixel_size

            # Calculate statistics
            pixel_gaps.append(
                (abs((center_a - center_b).dot(col((1, 0, 0)))) - width) / pixel_size
            )
            bottom_gaps.append(
                ((center_a - center_b).dot(col((0, 1, 0))) - 0) / pixel_size
            )

            slow_a = col(a.get_slow_axis())
            slow_b = col(b.get_slow_axis())

            angle = slow_a.angle(slow_b, deg=True)
            if slow_a.cross(slow_b)[1] < 0:
                angle = -angle
            angles.append(angle)

            a_to_b = center_b - center_a

            angle = a_to_b.angle(col((1, 0, 0)), deg=True)
            if a_to_b.cross(col((1, 0, 0)))[1] < 0:
                angle = -angle
            sensor_angles.append(angle)

        pg_stats = flex.mean_and_variance(pixel_gaps)
        bg_stats = flex.mean_and_variance(bottom_gaps)
        an_stats = flex.mean_and_variance(angles)
        sa_stats = flex.mean_and_variance(sensor_angles)
        for x in range(len(pixel_gaps)):
            print(
                "%2d. Px gap: %5.3f  vertical: %6.3f  asic_angle: %6.3f  sensor_angle: %6.3f"
                % (x, pixel_gaps[x], bottom_gaps[x], angles[x], sensor_angles[x])
            )

        print("Sensor stats (means and standard deviations)")
        print(
            "3 pixel gap                        : %f, %f"
            % (pg_stats.mean(), pg_stats.unweighted_sample_standard_deviation())
        )
        print(
            "Vertical offset                    : %f, %f"
            % (bg_stats.mean(), bg_stats.unweighted_sample_standard_deviation())
        )
        print(
            "Angular deviations (between asics) : %f, %f"
            % (an_stats.mean(), an_stats.unweighted_sample_standard_deviation())
        )
        print(
            "Angular deviations (sensor)        : %f, %f"
            % (sa_stats.mean(), sa_stats.unweighted_sample_standard_deviation())
        )
Example #26
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  from dials.util.options import flatten_experiments
  import libtbx.load_env

  usage = "%s [options] datablock.json" %(
    libtbx.env.dispatcher_name)

  import os
  if 'DIALS_EXPORT_DO_NOT_CHECK_FORMAT' in os.environ:
    parser = OptionParser(
      usage=usage,
      phil=phil_scope,
      read_datablocks=True,
      read_experiments=True,
      check_format=False,
      epilog=help_message)

  else:
    parser = OptionParser(
      usage=usage,
      phil=phil_scope,
      read_datablocks=True,
      read_experiments=True,
      check_format=True,
      epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)
  datablocks = flatten_datablocks(params.input.datablock)
  experiments = flatten_experiments(params.input.experiments)

  if (len(datablocks) == 0 and len(experiments) == 0):
    parser.print_help()
    exit(0)

  if len(datablocks) == 0 and len(experiments) > 0:
    imagesets = experiments.imagesets()
    crystal = experiments[0].crystal
  else:
    imagesets = []
    crystal = None
    for datablock in datablocks:
      imagesets.extend(datablock.extract_imagesets())

  assert len(imagesets) == 1, len(imagesets)
  imageset = imagesets[0]
  gonio = imageset.get_goniometer()
  if not params.detector_distance:
    detector = imageset.get_detector()
    if len(detector) > 1:
      params.detector_distance = detector.hierarchy().get_directed_distance()
    else:
      params.detector_distance = detector[0].get_directed_distance()

  if gonio is not None and not isinstance(gonio, MultiAxisGoniometer):
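    # wrap the single rotation axis as a one-axis MultiAxisGoniometer so the
    # viewer can treat single- and multi-axis goniometers uniformly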
    from dxtbx.model.goniometer import GoniometerFactory
    gonio = GoniometerFactory.multi_axis(
      axes=flex.vec3_double((gonio.get_rotation_axis(),)),
      angles=flex.double((0,)),
      names=flex.std_string(('GON_OMEGA',)),
      scan_axis=0)
    imageset.set_goniometer(gonio)

  if isinstance(gonio, MultiAxisGoniometer):
    if params.angle:
      assert len(params.angle) == len(gonio.get_angles())
    else:
      for angle in gonio.get_angles():
        params.angle.append(angle)

  import wxtbx.app
  a = wxtbx.app.CCTBXApp(0)
  a.settings = params
  f = ExperimentViewer(
    None, -1, "Experiment viewer", size=(1024,768))
  f.load_imageset(imageset, crystal=crystal)
  f.Show()
  a.SetTopWindow(f)
  a.MainLoop()
Example #27
    from dials.util.options import flatten_datablocks
    from dials.util.options import flatten_experiments
    from dials.util.options import flatten_reflections
    import libtbx.load_env
    usage_message = """
    %s datablock.json [reflections.pickle]
  """ % libtbx.env.dispatcher_name
    parser = OptionParser(usage=usage_message,
                          phil=phil_scope,
                          read_datablocks=True,
                          read_experiments=True,
                          read_reflections=True,
                          read_datablocks_from_images=True,
                          epilog=help_message)
    params, options = parser.parse_args(show_diff_phil=True)
    datablocks = flatten_datablocks(params.input.datablock)
    experiments = flatten_experiments(params.input.experiments)
    reflections = flatten_reflections(params.input.reflections)

    if len(datablocks) == 0 and len(experiments) == 0:
        parser.print_help()
        exit(0)

    if len(datablocks) > 0:
        assert len(datablocks) == 1
        datablock = datablocks[0]
    else:
        datablock = None

    if params.mask is not None:
        from libtbx import easy_pickle
Example #28
  def run(self):
    '''Execute the script.'''
    from dials.util.options import flatten_datablocks
    from time import time
    from dials.util import log
    from libtbx.utils import Sorry
    import datetime
    start_time = time()

    # Parse the command line
    params, options = self.parser.parse_args(show_diff_phil=False)

    # Configure the logging
    log.config(
      params.verbosity,
      info=params.output.log,
      debug=params.output.debug_log)

    from dials.util.version import dials_version
    logger.info(dials_version())

    # Log the diff phil
    diff_phil = self.parser.diff_phil.as_str()
    if diff_phil != '':
      logger.info('The following parameters have been modified:\n')
      logger.info(diff_phil)

    # Ensure we have a data block
    datablocks = flatten_datablocks(params.input.datablock)
    if len(datablocks) == 0:
      self.parser.print_help()
      return

    # Extend the first datablock
    datablock = datablocks[0]
    for db in datablocks[1:]:
      if datablock.format_class() != db.format_class():
        raise Sorry("Datablocks must have the same format")
      datablock.extend(db)

    # Get the imagesets and sweeps
    stills = datablock.extract_stills()
    sweeps = datablock.extract_sweeps()
    if len(stills) > 0:
      raise Sorry("Sets of still images are currently unsupported")
    logger.info("Number of sweeps = %d" % len(sweeps))

    # Sort the sweeps by timestamps
    logger.info("Sorting sweeps based on timestamp")
    sweeps = sorted(sweeps, key=lambda x: x.get_scan().get_epochs()[0])

    # Count the number of datasets from each day
    from collections import Counter
    counter = Counter()
    for s in sweeps:
      timestamp = s.get_scan().get_epochs()[0]
      timestamp = datetime.datetime.fromtimestamp(timestamp)
      timestamp = timestamp.strftime('%Y-%m-%d')
      counter[timestamp] += 1

    # Print the number of datasets on each day
    for timestamp in sorted(counter.keys()):
      logger.info("%d datasets collected on %s" % (counter[timestamp], timestamp))

    # Loop through and see if any models might be shared
    b_list = [ s.get_beam() for s in sweeps ]
    d_list = [ s.get_detector() for s in sweeps ]
    g_list = [ s.get_goniometer() for s in sweeps ]
    b_index = []
    d_index = []
    g_index = []
    for i in range(len(sweeps)):
      b = b_list[i]
      d = d_list[i]
      g = g_list[i]
      bn = i
      dn = i
      gn = i
      if i > 0:
        bj = b_index[-1]
        dj = d_index[-1]
        gj = g_index[-1]
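        # compare only with the previous sweep's models, so a run of similar
        # models collapses onto the index of the first sweep in that run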
        if b.is_similar_to(b_list[bj]):
          bn = bj
        if d.is_similar_to(d_list[dj]):
          dn = dj
        if g.is_similar_to(g_list[gj]):
          gn = gj
      b_index.append(bn)
      d_index.append(dn)
      g_index.append(gn)

    # Print a table of possibly shared models
    from libtbx.table_utils import format as table
    rows = [["Sweep", "ID", "Beam", "Detector", "Goniometer", "Date", "Time"]]
    for i in range(len(sweeps)):
      timestamp = sweeps[i].get_scan().get_epochs()[0]
      timestamp = datetime.datetime.fromtimestamp(timestamp)
      date_str = timestamp.strftime('%Y-%m-%d')
      time_str = timestamp.strftime('%H:%M:%S')
      row = [
        '%s' % sweeps[i].get_template(),
        '%s' % i,
        '%s' % b_index[i],
        '%s' % d_index[i],
        '%s' % g_index[i],
        '%s' % date_str,
        '%s' % time_str]
      rows.append(row)
    logger.info(table(rows, has_header=True, justify='left', prefix=' '))

    # Print the time
    logger.info("Time Taken: %f" % (time() - start_time))
Example #29
    def run(self):
        '''Execute the script.'''

        from dials.util.options import flatten_reflections, flatten_experiments, \
          flatten_datablocks
        import cPickle as pickle

        # Parse the command line
        params, options = self.parser.parse_args(show_diff_phil=True)
        reflections = flatten_reflections(params.input.reflections)
        experiments = flatten_experiments(params.input.experiments)
        datablocks = flatten_datablocks(params.input.datablock)

        # Try to load the models and data
        slice_exps = len(experiments) > 0
        slice_refs = len(reflections) > 0
        slice_dbs = len(datablocks) > 0

        # Catch case of nothing to do
        if not any([slice_exps, slice_refs, slice_dbs]):
            print "No suitable input provided"
            self.parser.print_help()
            return

        if reflections:
            if len(reflections) > 1:
                raise Sorry(
                    "Only one reflections list can be imported at present")
            reflections = reflections[0]

            # calculate frame numbers if needed
            if experiments:
                reflections = calculate_frame_numbers(reflections, experiments)

            # if we still don't have the right column give up
            if 'xyzobs.px.value' not in reflections:
                raise Sorry(
                    "These reflections do not have frame numbers set, and "
                    "there are no experiments provided to calculate these.")

        # set trivial case where no scan range is provided at all
        if not params.scan_range:
            params.scan_range = [None]

        # check if slicing into blocks
        if params.block_size is not None:
            # in this case for simplicity, ensure that there is either an
            # an experiment list or datablocks, but not both. Ensure there is only
            # a single scan contained within.
            if [slice_exps, slice_dbs].count(True) != 1:
                raise Sorry(
                    "For slicing into blocks please provide either datablocks"
                    " or experiments, but not both.")
            if slice_exps:
                if len(experiments) > 1:
                    raise Sorry(
                        "For slicing into blocks please provide a single "
                        "scan only")
                scan = experiments[0].scan
            if slice_dbs:
                scans = datablocks[0].unique_scans()
                if len(scans) > 1 or len(datablocks) > 1:
                    raise Sorry(
                        "For slicing into blocks please provide a single "
                        "scan only")
                scan = scans[0]

            # Having extracted the scan, calculate the blocks
            params.scan_range = calculate_block_ranges(scan, params.block_size)

            # Do the slicing then recombine
            if slice_exps:
                sliced = [slice_experiments(experiments, [sr])[0] \
                  for sr in params.scan_range]
                sliced_experiments = ExperimentList()
                for exp in sliced:
                    sliced_experiments.append(exp)

            if slice_dbs:
                sliced = [slice_datablocks(datablocks, [sr])[0] \
                  for sr in params.scan_range]
                imagesets = [db.extract_imagesets()[0] for db in sliced]
                sliced_datablocks = DataBlock(imagesets)

            # slice reflections if present
            if slice_refs:
                sliced = [slice_reflections(reflections, [sr]) \
                  for sr in params.scan_range]
                sliced_reflections = sliced[0]
                for i, rt in enumerate(sliced[1:]):
                    rt['id'] += (i + 1)  # set id
                    sliced_reflections.extend(rt)

        else:
            # slice each dataset into the requested subset
            if slice_exps:
                sliced_experiments = slice_experiments(experiments,
                                                       params.scan_range)
            if slice_refs:
                sliced_reflections = slice_reflections(reflections,
                                                       params.scan_range)
            if slice_dbs:
                sliced_datablocks = slice_datablocks(datablocks,
                                                     params.scan_range)

        # Save sliced experiments
        if slice_exps:
            output_experiments_filename = params.output.experiments_filename
            if output_experiments_filename is None:
                # take first filename as template
                bname = basename(params.input.experiments[0].filename)
                bname = splitext(bname)[0]
                if not bname: bname = "experiments"
                if len(params.scan_range) == 1 and params.scan_range[0] is not None:
                    ext = "_{0}_{1}.json".format(*params.scan_range[0])
                else:
                    ext = "_sliced.json"
                output_experiments_filename = bname + ext
            print 'Saving sliced experiments to {0}'.format(
                output_experiments_filename)

            from dxtbx.model.experiment.experiment_list import ExperimentListDumper
            dump = ExperimentListDumper(sliced_experiments)
            dump.as_json(output_experiments_filename)

        # Save sliced reflections
        if slice_refs:
            output_reflections_filename = params.output.reflections_filename
            if output_reflections_filename is None:
                # take first filename as template
                bname = basename(params.input.reflections[0].filename)
                bname = splitext(bname)[0]
                if not bname: bname = "reflections"
                if len(params.scan_range) == 1 and params.scan_range[0] is not None:
                    ext = "_{0}_{1}.pickle".format(*params.scan_range[0])
                else:
                    ext = "_sliced.pickle"
                output_reflections_filename = bname + ext

            print 'Saving sliced reflections to {0}'.format(
                output_reflections_filename)
            sliced_reflections.as_pickle(output_reflections_filename)

        # Save sliced datablocks
        if slice_dbs:
            output_datablocks_filename = params.output.datablocks_filename
            if output_datablocks_filename is None:
                # take first filename as template
                bname = basename(params.input.datablock[0].filename)
                bname = splitext(bname)[0]
                if not bname: bname = "datablock"
                if len(params.scan_range) == 1 and params.scan_range[0] is not None:
                    ext = "_{0}_{1}.json".format(*params.scan_range[0])
                else:
                    ext = "_sliced.json"
                output_datablocks_filename = bname + ext
            print 'Saving sliced datablocks to {0}'.format(
                output_datablocks_filename)

            from dxtbx.datablock import DataBlockDumper
            dump = DataBlockDumper(sliced_datablocks)
            dump.as_file(output_datablocks_filename)

        return
Example #30
  def run(self):
    '''Execute the script.'''
    from dxtbx.datablock import DataBlockTemplateImporter
    from dials.util.options import flatten_datablocks
    from dials.util import log
    from logging import info
    from time import time
    from libtbx.utils import Abort

    # Parse the command line
    params, options = self.parser.parse_args(show_diff_phil=False)
    datablocks = flatten_datablocks(params.input.datablock)

    # Check we have some filenames
    if len(datablocks) == 0:

      # Check if a template has been set and print help if not, otherwise try to
      # import the images based on the template input
      if len(params.input.template) == 0:
        self.parser.print_help()
        exit(0)
      else:
        importer = DataBlockTemplateImporter(
          params.input.template,
          options.verbose)
        datablocks = importer.datablocks

    # Save the options
    self.options = options
    self.params = params
    self.load_reference_geometry()

    st = time()

    # Import stuff
    if len(datablocks) == 0:
      raise Abort('No datablocks specified')
    elif len(datablocks) > 1:
      raise Abort('Only 1 datablock can be processed at a time.')
    datablock = datablocks[0]

    if self.reference_detector is not None:
      for imageset in datablock.extract_imagesets():
        imageset.set_detector(self.reference_detector)

    # Configure logging
    log.config(
      params.verbosity,
      info='dials.process.log',
      debug='dials.process.debug.log')

    # Log the diff phil
    diff_phil = self.parser.diff_phil.as_str()
    if diff_phil != '':
      info('The following parameters have been modified:\n')
      info(diff_phil)

    if self.params.output.datablock_filename:
      from dxtbx.datablock import DataBlockDumper
      dump = DataBlockDumper(datablock)
      dump.as_json(self.params.output.datablock_filename)

    # Do the processing
    observed = self.find_spots(datablock)
    experiments, indexed = self.index(datablock, observed)
    experiments = self.refine(experiments, indexed)
    integrated = self.integrate(experiments, indexed)

    # Total Time
    info("")
    info("Total Time Taken = %f seconds" % (time() - st))
Example #31
def run(args):
    import libtbx.load_env
    from dials.util import log
    usage = "%s [options] datablock.json strong.pickle" % libtbx.env.dispatcher_name

    parser = OptionParser(usage=usage,
                          phil=phil_scope,
                          read_datablocks=True,
                          read_reflections=True,
                          check_format=False,
                          epilog=help_message)

    params, options = parser.parse_args(show_diff_phil=False)
    datablocks = flatten_datablocks(params.input.datablock)
    reflections = flatten_reflections(params.input.reflections)

    if len(datablocks) == 0 or len(reflections) == 0:
        parser.print_help()
        exit(0)

    imagesets = []

    for db in datablocks:
        imagesets.extend(db.extract_imagesets())

    spots = []

    for reflection in reflections:
        unique_ids = set(reflection['id'])
        for unique_id in sorted(unique_ids):
            spots.append(reflection.select(reflection['id'] == unique_id))

    assert len(imagesets) == len(spots)

    if params.output.compress:
        import gzip
        fout = gzip.GzipFile(params.output.csv, 'w')
    else:
        fout = open(params.output.csv, 'w')

    fout.write('# x,y,z,experiment_id,imageset_id\n')

    dp = params.output.dp
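    # dp sets the decimal places for the x,y,z columns; dp <= 0 falls back to %f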

    if dp <= 0:
        fmt = '%f,%f,%f,%d,%d\n'
    else:
        fmt = '%%.%df,%%.%df,%%.%df,%%d,%%d\n' % (dp, dp, dp)

    print 'Using format:', fmt.strip()

    for k, (imageset, refl) in enumerate(zip(imagesets, spots)):
        if 'imageset_id' not in refl:
            refl['imageset_id'] = refl['id']

        reflmm = indexer_base.map_spots_pixel_to_mm_rad(
            spots=refl,
            detector=imageset.get_detector(),
            scan=imageset.get_scan())

        indexer_base.map_centroids_to_reciprocal_space(
            reflmm,
            detector=imageset.get_detector(),
            beam=imageset.get_beam(),
            goniometer=imageset.get_goniometer())

        rlp = reflmm['rlp']

        for _rlp in rlp:
            fout.write(fmt % (_rlp[0], _rlp[1], _rlp[2], k, k))

        print 'Appended %d spots to %s' % (len(rlp), params.output.csv)

    fout.close()
Example #32
def run(args):
    import libtbx.load_env
    from libtbx.utils import Sorry
    from dials.util import log

    usage = "%s [options] datablock.json strong.pickle" % libtbx.env.dispatcher_name

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_reflections=True,
        read_datablocks=True,
        read_datablocks_from_images=True,
        check_format=False,
        epilog=help_message,
    )

    params, options = parser.parse_args(show_diff_phil=False)

    from dials.util.version import dials_version

    logger.info(dials_version())

    # Log the diff phil
    diff_phil = parser.diff_phil.as_str()
    if diff_phil != "":
        logger.info("The following parameters have been modified:\n")
        logger.info(diff_phil)

    datablocks = flatten_datablocks(params.input.datablock)
    reflections = flatten_reflections(params.input.reflections)

    if len(datablocks) == 0:
        parser.print_help()
        return
    imagesets = []
    for datablock in datablocks:
        imagesets.extend(datablock.extract_imagesets())

    if len(reflections) == 0:
        raise Sorry("No reflection lists found in input")
    if len(reflections) > 1:
        assert len(reflections) == len(imagesets)
        for i in range(len(reflections)):
            reflections[i]["imageset_id"] = flex.int(len(reflections[i]), i)
            if i > 0:
                reflections[0].extend(reflections[i])

    reflections_input = reflections[0]

    for imageset in imagesets:
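        # a sweep with zero oscillation width is really a set of stills, so
        # remove the goniometer and scan models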
        if (imageset.get_goniometer() is not None
                and imageset.get_scan() is not None
                and imageset.get_scan().get_oscillation()[1] == 0):
            imageset.set_goniometer(None)
            imageset.set_scan(None)

    from dials.algorithms.indexing.indexer import Indexer

    reflections = flex.reflection_table()

    for i, imageset in enumerate(imagesets):
        if "imageset_id" not in reflections_input:
            reflections_input["imageset_id"] = reflections_input["id"]
        sel = reflections_input["imageset_id"] == i
        refl = Indexer.map_spots_pixel_to_mm_rad(reflections_input.select(sel),
                                                 imageset.get_detector(),
                                                 imageset.get_scan())
        Indexer.map_centroids_to_reciprocal_space(
            refl,
            imageset.get_detector(),
            imageset.get_beam(),
            imageset.get_goniometer(),
        )
        refl["entering"] = Indexer.calculate_entering_flags(
            refl,
            beam=imageset.get_beam(),
            goniometer=imageset.get_goniometer())
        reflections.extend(refl)

    filter_ice(reflections, steps=params.steps)
    return
Example #33
def run(args):
    import os
    import libtbx.load_env
    from libtbx.utils import Sorry
    from dials.util.options import OptionParser
    # assumed from module scope in the original snippet (along with phil_scope,
    # help_message, image_filter and colour_schemes):
    from dials.util.options import flatten_datablocks
    usage = "%s [options] datablock.json | image.cbf" % libtbx.env.dispatcher_name

    parser = OptionParser(usage=usage,
                          phil=phil_scope,
                          read_datablocks=True,
                          read_datablocks_from_images=True,
                          check_format=True,
                          epilog=help_message)

    params, options = parser.parse_args(show_diff_phil=True)

    datablocks = flatten_datablocks(params.input.datablock)
    if len(datablocks) == 0:
        parser.print_help()
        exit(0)

    imagesets = datablocks[0].extract_imagesets()

    brightness = params.brightness / 100.0
    vendortype = "made up"

    # check that binning is a power of 2
    binning = params.binning
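    # a power of two has exactly one set bit, so n & (n - 1) == 0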
    if not (binning > 0 and ((binning & (binning - 1)) == 0)):
        raise Sorry("binning must be a power of 2")

    output_dir = params.output_dir
    if output_dir is None:
        output_dir = "."
    elif not os.path.exists(output_dir):
        os.makedirs(output_dir)

    from rstbx.slip_viewer.tile_generation \
         import _get_flex_image, _get_flex_image_multipanel

    for imageset in imagesets:
        detector = imageset.get_detector()

        if len(detector) > 1:
            raise Sorry(
                'Currently only single panel detectors are supported by %s' %
                libtbx.env.dispatcher_name)

        panel = detector[0]
        scan = imageset.get_scan()
        # XXX is this inclusive or exclusive?
        saturation = panel.get_trusted_range()[1]
        if params.saturation:
            saturation = params.saturation
        if scan is not None and scan.get_oscillation()[1] > 0:
            start, end = scan.get_image_range()
        else:
            # without a scan, fall back to zero-based indices over the imageset
            # (end is inclusive in the loop below, hence the -1)
            start, end = 0, len(imageset) - 1
        for i_image in range(start, end + 1):
            image = imageset.get_raw_data(i_image - start)

            #if len(detector) == 1:
            #image = [image]

            trange = [p.get_trusted_range() for p in detector]
            mask = imageset.get_mask(i_image - start)
            if mask is None:
                mask = [
                    p.get_trusted_range_mask(im)
                    for im, p in zip(image, detector)
                ]

            if params.show_mask:
                for rd, m in zip(image, mask):
                    rd.set_selected(~m, -2)

            image = image_filter(image,
                                 mask,
                                 display=params.display,
                                 gain_value=params.gain,
                                 nsigma_b=params.nsigma_b,
                                 nsigma_s=params.nsigma_s,
                                 global_threshold=params.global_threshold,
                                 min_local=params.min_local,
                                 kernel_size=params.kernel_size)

            show_untrusted = params.show_mask
            if len(detector) > 1:
                # FIXME This doesn't work properly, as flex_image.size2() is incorrect
                # also binning doesn't work
                assert binning == 1
                flex_image = _get_flex_image_multipanel(
                    brightness=brightness,
                    panels=detector,
                    raw_data=image,
                    beam=imageset.get_beam(),
                    show_untrusted=show_untrusted)
            else:
                flex_image = _get_flex_image(brightness=brightness,
                                             data=image[0],
                                             binning=binning,
                                             saturation=saturation,
                                             vendortype=vendortype,
                                             show_untrusted=show_untrusted)

            flex_image.setWindow(0, 0, 1)
            flex_image.adjust(
                color_scheme=colour_schemes.get(params.colour_scheme))

            # now export as a bitmap
            flex_image.prep_string()
            try:
                from PIL import Image
            except ImportError:
                import Image
            # XXX is size//binning safe here?
            try:  # fromstring raises Exception in Pillow >= 3.0.0
                pil_img = Image.fromstring('RGB',
                                           (flex_image.size2() // binning,
                                            flex_image.size1() // binning),
                                           flex_image.export_string)
            except Exception:
                pil_img = Image.frombytes('RGB',
                                          (flex_image.size2() // binning,
                                           flex_image.size1() // binning),
                                          flex_image.export_string)
            path = os.path.join(
                output_dir,
                params.prefix + ("%04d" % i_image) + '.' + params.format)

            print "Exporting %s" % path
            with open(path, 'wb') as tmp_stream:
                pil_img.save(tmp_stream,
                             format=params.format,
                             compress_level=params.png.compress_level,
                             quality=params.jpeg.quality)
Example #34
def run(args):

    from dials.util.options import OptionParser
    from dials.util.options import flatten_datablocks
    from dials.util.options import flatten_experiments
    from dials.util.options import flatten_reflections
    from dials.util import log
    import libtbx.load_env

    usage = "%s [options] datablock.json reflections.pickle" % (
        libtbx.env.dispatcher_name)

    parser = OptionParser(usage=usage,
                          phil=phil_scope,
                          read_datablocks=True,
                          read_experiments=True,
                          read_reflections=True,
                          check_format=False,
                          epilog=help_message)

    params, options = parser.parse_args()
    datablocks = flatten_datablocks(params.input.datablock)
    experiments = flatten_experiments(params.input.experiments)
    reflections = flatten_reflections(params.input.reflections)

    if (len(datablocks) == 0
            and len(experiments) == 0) or len(reflections) == 0:
        parser.print_help()
        exit(0)

    # Configure the logging
    log.config(info='dials.detect_blanks.log')

    # Log the diff phil
    diff_phil = parser.diff_phil.as_str()
    if diff_phil != '':
        logger.info('The following parameters have been modified:\n')
        logger.info(diff_phil)

    reflections = reflections[0]

    if len(datablocks) == 0 and len(experiments) > 0:
        imagesets = experiments.imagesets()
    else:
        imagesets = []
        for datablock in datablocks:
            imagesets.extend(datablock.extract_imagesets())

    assert len(imagesets) == 1
    imageset = imagesets[0]
    scan = imageset.get_scan()

    integrated_sel = reflections.get_flags(reflections.flags.integrated)
    indexed_sel = reflections.get_flags(reflections.flags.indexed)
    centroid_outlier_sel = reflections.get_flags(
        reflections.flags.centroid_outlier)
    strong_sel = reflections.get_flags(reflections.flags.strong)
    indexed_sel &= (~centroid_outlier_sel)

    logger.info('Analysis of %i strong reflections:' % strong_sel.count(True))
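    # blank_counts_analysis bins the spots in phi and flags image ranges whose
    # counts drop below the counts_fractional_loss threshold; these are
    # returned as the 'blank_regions' reported below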
    strong_results = blank_counts_analysis(
        reflections.select(strong_sel),
        scan,
        phi_step=params.phi_step,
        fractional_loss=params.counts_fractional_loss)
    for blank_start, blank_end in strong_results['blank_regions']:
        logger.info('Potential blank images: %i -> %i' %
                    (blank_start + 1, blank_end))

    indexed_results = None
    if indexed_sel.count(True) > 0:
        logger.info('Analysis of %i indexed reflections:' %
                    indexed_sel.count(True))
        indexed_results = blank_counts_analysis(
            reflections.select(indexed_sel),
            scan,
            phi_step=params.phi_step,
            fractional_loss=params.counts_fractional_loss)
        for blank_start, blank_end in indexed_results['blank_regions']:
            logger.info('Potential blank images: %i -> %i' %
                        (blank_start + 1, blank_end))

    integrated_results = None
    if integrated_sel.count(True) > 0:
        logger.info('Analysis of %i integrated reflections:' %
                    integrated_sel.count(True))
        integrated_results = blank_integrated_analysis(
            reflections.select(integrated_sel),
            scan,
            phi_step=params.phi_step,
            fractional_loss=params.misigma_fractional_loss)
        for blank_start, blank_end in integrated_results['blank_regions']:
            logger.info('Potential blank images: %i -> %i' %
                        (blank_start + 1, blank_end))

    d = {
        'strong': strong_results,
        'indexed': indexed_results,
        'integrated': integrated_results
    }

    if params.output.json is not None:
        import json
        with open(params.output.json, 'wb') as f:
            json.dump(d, f)

    if params.output.plot:
        from matplotlib import pyplot

        plots = [(strong_results, '-')]
        if indexed_results:
            plots.append((indexed_results, '--'))
        if integrated_results:
            plots.append((integrated_results, ':'))

        for results, linestyle in plots:
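            # normalise x and y to [0, 1] so the strong/indexed/integrated
            # profiles overlay; images flagged as blank are re-plotted in red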
            xs = results['data'][0]['x']
            ys = results['data'][0]['y']
            xmax = max(xs)
            ymax = max(ys)
            xs = [x / xmax for x in xs]
            ys = [y / ymax for y in ys]
            blanks = results['data'][0]['blank']
            pyplot.plot(xs, ys, color='blue', linestyle=linestyle)
            pyplot.plot(*zip(*[(x, y) for x, y, blank in zip(xs, ys, blanks)
                               if blank]),
                        color='red',
                        linestyle=linestyle)
        pyplot.ylim(0)
        pyplot.show()
        pyplot.clf()
Example #35
def read_expt(filename):
    from dials.util.options import flatten_datablocks
    from dials.util.phil import DataBlockConverters

    converter = DataBlockConverters(check_format=False)
    return flatten_datablocks([converter.from_string(filename)])
    def run(self):
        '''Execute the script.'''
        from dials.array_family import flex
        from dials.util.options import flatten_reflections
        from dials.util.options import flatten_datablocks
        from dials.util.options import flatten_experiments
        from libtbx.utils import Sorry

        # Parse the command line
        params, options = self.parser.parse_args(show_diff_phil=True)
        reflections = flatten_reflections(params.input.reflections)

        if params.input.datablock is not None and len(params.input.datablock):
            datablocks = flatten_datablocks(params.input.datablock)
            assert len(datablocks) == 1
            imagesets = datablocks[0].extract_imagesets()
            assert len(imagesets) == 1
            imageset = imagesets[0]
        elif params.input.experiments is not None and len(
                params.input.experiments):
            experiments = flatten_experiments(params.input.experiments)
            assert len(experiments) == 1
            imageset = experiments[0].imageset
        else:
            imageset = None

        if len(reflections) == 0:
            self.parser.print_help()
            raise Sorry('No valid reflection file given')
        if len(reflections) != 1:
            self.parser.print_help()
            raise Sorry('Exactly 1 reflection file must be specified')
        reflections = reflections[0]

        # Check params
        if params.d_min is not None and params.d_max is not None:
            if params.d_min > params.d_max:
                raise Sorry("d_min must be less than d_max")
        if params.d_min is not None or params.d_max is not None:
            if 'd' not in reflections:
                raise Sorry("Reflection table has no resolution information")

        # Check params
        if params.partiality.min is not None and params.partiality.max is not None:
            if params.partiality.min > params.partiality.max:
                raise Sorry(
                    "partiality.min must be less than partiality.max")
        if params.partiality.min is not None or params.partiality.max is not None:
            if 'partiality' not in reflections:
                raise Sorry("Reflection table has no partiality information")

        print "{0} reflections loaded".format(len(reflections))

        if (len(params.inclusions.flag) == 0
                and len(params.exclusions.flag) == 0 and params.d_min is None
                and params.d_max is None and params.partiality.min is None
                and params.partiality.max is None
                and not params.ice_rings.filter):
            print "No filter specified. Performing analysis instead."
            return self.analysis(reflections)

        # Build up the initial inclusion selection
        inc = flex.bool(len(reflections), True)
        # 2016/07/06 GW logic here not right should be && for each flag not or?
        for flag in params.inclusions.flag:
            sel = reflections.get_flags(getattr(reflections.flags, flag))
            inc = inc & sel
        reflections = reflections.select(inc)

        print "{0} reflections selected to form the working set".format(
            len(reflections))

        # Make requested exclusions from the current selection
        exc = flex.bool(len(reflections))
        for flag in params.exclusions.flag:
            print flag
            sel = reflections.get_flags(getattr(reflections.flags, flag))
            exc = exc | sel
        reflections = reflections.select(~exc)

        print "{0} reflections excluded from the working set".format(
            exc.count(True))

        # Filter based on resolution
        if params.d_min is not None:
            selection = reflections['d'] >= params.d_min
            reflections = reflections.select(selection)
            print "Selected %d reflections with d >= %f" % (len(reflections),
                                                            params.d_min)

        # Filter based on resolution
        if params.d_max is not None:
            selection = reflections['d'] <= params.d_max
            reflections = reflections.select(selection)
            print "Selected %d reflections with d <= %f" % (len(reflections),
                                                            params.d_max)

        # Filter based on partiality
        if params.partiality.min is not None:
            selection = reflections['partiality'] >= params.partiality.min
            reflections = reflections.select(selection)
            print "Selected %d reflections with partiality >= %f" % (
                len(reflections), params.partiality.min)

        # Filter based on partiality
        if params.partiality.max is not None:
            selection = reflections['partiality'] <= params.partiality.max
            reflections = reflections.select(selection)
            print "Selected %d reflections with partiality <= %f" % (
                len(reflections), params.partiality.max)

        # Filter powder rings

        if params.ice_rings.filter:
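            # PowderRingFilter flags reflections whose d spacing lies close to
            # a powder/ice ring of the given unit cell and space group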
            from dials.algorithms.integration import filtering
            if 'd' in reflections:
                d_spacings = reflections['d']
            else:
                from cctbx import uctbx
                if 'rlp' not in reflections:
                    assert imageset is not None
                    from dials.algorithms.spot_finding.per_image_analysis import map_to_reciprocal_space
                    reflections = map_to_reciprocal_space(
                        reflections, imageset)
                d_star_sq = flex.pow2(reflections['rlp'].norms())
                d_spacings = uctbx.d_star_sq_as_d(d_star_sq)

            d_min = params.ice_rings.d_min
            width = params.ice_rings.width

            if d_min is None:
                d_min = flex.min(d_spacings)

            ice_filter = filtering.PowderRingFilter(
                params.ice_rings.unit_cell,
                params.ice_rings.space_group.group(), d_min, width)

            ice_sel = ice_filter(d_spacings)

            print "Rejecting %i reflections at ice ring resolution" % ice_sel.count(
                True)
            reflections = reflections.select(~ice_sel)
            #reflections = reflections.select(ice_sel)

        # Save filtered reflections to file
        if params.output.reflections:
            print "Saving {0} reflections to {1}".format(
                len(reflections), params.output.reflections)
            reflections.as_pickle(params.output.reflections)

        return
Example #37
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  import libtbx.load_env
  import libtbx.utils
  # assumed from module scope in the original snippet (along with phil_scope,
  # help_message and polygon_area):
  from libtbx.utils import Sorry
  from scitbx.array_family import flex

  usage = "%s [options] datablock.json" %(
    libtbx.env.dispatcher_name)

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    check_format=True,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)
  datablocks = flatten_datablocks(params.input.datablock)

  if len(datablocks) == 0:
    parser.print_help()
    exit(0)

  assert len(datablocks) == 1
  imagesets = datablocks[0].extract_imagesets()

  imageset = imagesets[0]
  goniometer = imageset.get_goniometer()
  detector = imageset.get_detector()
  scan = imageset.get_scan()
  masker = imageset.masker().format_class(imageset.paths()[0]).get_goniometer_shadow_masker()
  if masker is None:
    raise Sorry('Goniometer model does not support shadowing.')
  angles = goniometer.get_angles()
  names = goniometer.get_names()
  scan_axis = goniometer.get_scan_axis()
  phi = angles[0]

  if params.step_size is libtbx.Auto:
    if params.mode == '1d':
      step = scan.get_oscillation()[1]
    else:
      step = 10
  else:
    step = params.step_size

  if params.mode == '1d':
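    # 1d mode: shadowed pixel fraction as a function of scan angle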
    if params.oscillation_range is not None:
      start, end = params.oscillation_range
    else:
      start, end = scan.get_oscillation_range()

    scan_points = flex.double(libtbx.utils.frange(start, end, step=step))
    n_px_shadowed = flex.double(scan_points.size(), 0)
    n_px_tot = flex.double(scan_points.size(), 0)

    assert len(angles) == 3
    for i, scan_angle in enumerate(scan_points):
      shadow = masker.project_extrema(detector, scan_angle=scan_angle)
      for p_id in range(len(detector)):
        px_x, px_y = detector[p_id].get_image_size()
        n_px_tot[i] += px_x * px_y
        if shadow[p_id].size() < 4:
          continue
        n_px_shadowed[i] += polygon_area(shadow[p_id])

  else:
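    # 2d mode: shadowed fraction over a kappa/omega grid at fixed phi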
    kappa_values = flex.double(libtbx.utils.frange(0, 360, step=step))
    omega_values = flex.double(libtbx.utils.frange(0, 360, step=step))
    grid = flex.grid(kappa_values.size(), omega_values.size())
    n_px_shadowed = flex.double(grid, 0)
    n_px_tot = flex.double(grid, 0)

    assert len(angles) == 3
    for i, kappa in enumerate(kappa_values):
      for j, omega in enumerate(omega_values):
        masker.goniometer.set_angles((phi, kappa, omega))
        shadow = masker.project_extrema(detector, scan_angle=omega)
        for p_id in range(len(detector)):
          px_x, px_y = detector[p_id].get_image_size()
          n_px_tot[i, j] += px_x * px_y
          if shadow[p_id].size() < 4:
            continue
          n_px_shadowed[i, j] += polygon_area(shadow[p_id])

  fraction_shadowed = n_px_shadowed/n_px_tot

  if params.output.json is not None:
    if params.mode == '2d':
      raise Sorry('json output not supported for mode=2d')

    print 'Writing json output to %s' %params.output.json
    d = {
      'scan_points': list(scan_points),
      'fraction_shadowed': list(fraction_shadowed),
    }
    import json
    with open(params.output.json, 'wb') as f:
      json.dump(d, f)

  if params.output.plot is not None:
    import matplotlib
    matplotlib.use('Agg')
    from matplotlib import pyplot as plt
    plt.style.use('ggplot')

    if params.mode == '1d':
      plt.plot(scan_points.as_numpy_array(), fraction_shadowed.as_numpy_array() * 100)
      plt.xlabel('%s angle (degrees)' %names[scan_axis])
      plt.ylabel('Shadowed area (%)')
      if params.y_max is not None:
        plt.ylim(0, params.y_max)
      else:
        plt.ylim(0, plt.ylim()[1])
    else:
      plt.imshow(fraction_shadowed.as_numpy_array() * 100, interpolation='bicubic')
      plt.xlabel('%s angle (degrees)' %names[2])
      plt.ylabel('%s angle (degrees)' %names[1])
      plt.xlim(0, 360/step)
      plt.ylim(0, 360/step)
      plt.axes().set_xticklabels(["%.0f" %(step * t) for t in plt.xticks()[0]])
      plt.axes().set_yticklabels(["%.0f" %(step * t) for t in plt.yticks()[0]])
      cbar = plt.colorbar()
      cbar.set_label('Shadowed area (%)')

    if params.output.size_inches is not None:
      fig = plt.gcf()
      fig.set_size_inches(params.output.size_inches)
    plt.tight_layout()
    print 'Saving plot to %s' %params.output.plot
    plt.savefig(params.output.plot)
Example #38
    def run(self):
        ''' Perform the integration. '''
        from dials.util.options import flatten_datablocks, flatten_experiments
        from dials.util import log

        # Parse the command line
        params, options = self.parser.parse_args(show_diff_phil=False)
        experiments = flatten_experiments(params.input.experiments)
        datablocks = flatten_datablocks(params.input.datablock)
        if len(experiments) == 0 and len(datablocks) == 0:
            self.parser.print_help()
            return

        if len(datablocks) > 0:
            assert len(datablocks) == 1
            imagesets = datablocks[0].extract_imagesets()
            assert len(imagesets) == 1
            imageset = imagesets[0]
            beam = imageset.get_beam()
            detector = imageset.get_detector()
        else:
            assert len(experiments) == 1
            imageset = experiments[0].imageset
            beam = experiments[0].beam
            detector = experiments[0].detector

        # Configure logging
        log.config()

        # Set the scan range
        if params.scan_range is None:
            scan_range = (0, len(imageset))
        else:
            scan_range = params.scan_range
            i0, i1 = scan_range
            if i0 < 0 or i1 > len(imageset):
                raise RuntimeError('Scan range outside image range')
            if i0 >= i1:
                raise RuntimeError('Invalid scan range')

        summed_data = None
        summed_mask = None

        # Loop through images
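        # accumulating a summed image and the logical AND of the per-image masks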
        for i in range(*scan_range):
            logger.info("Reading image %d" % i)

            # Read image
            data = imageset.get_raw_data(i)
            mask = imageset.get_mask(i)
            assert isinstance(data, tuple)
            assert isinstance(mask, tuple)

            if summed_data is None:
                summed_mask = mask
                summed_data = data
            else:
                summed_data = [sd + d for sd, d in zip(summed_data, data)]
                summed_mask = [sm & m for sm, m in zip(summed_mask, mask)]

        # Compute the 1/d^2 range and the number of bins
        num_bins = params.num_bins
        if num_bins is None:
            num_bins = sum(sum(p.get_image_size()) for p in detector)
        if params.d_max is None:
            vmin = 0
        else:
            vmin = (1.0 / params.d_max)**2
        if params.d_min is None:
            params.d_min = detector.get_max_resolution(beam.get_s0())
        vmax = (1.0 / params.d_min)**2

        # Print some info
        logger.info("Min 1/d^2: %f" % vmin)
        logger.info("Max 1/d^2: %f" % vmax)
        logger.info("Num bins:  %d" % num_bins)

        # Compute the radial average
        from dials.algorithms.background import RadialAverage
        radial_average = RadialAverage(beam, detector, vmin, vmax, num_bins)
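        # dividing by the number of images makes the accumulated profile a
        # per-image mean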
        for d, m in zip(summed_data, summed_mask):
            radial_average.add(d.as_double() / (scan_range[1] - scan_range[0]),
                               m)
        mean = radial_average.mean()
        reso = radial_average.inv_d2()

        logger.info("Writing to %s" % params.output.filename)
        with open(params.output.filename, "w") as outfile:
            for r, m in zip(reso, mean):
                outfile.write("%f, %f\n" % (r, m))
Example #39
def run(args):
    import copy
    import libtbx.load_env
    from libtbx.utils import Sorry
    from dials.util import log
    # assumed from module scope in the original snippet (along with logger,
    # phil_scope and help_message):
    from dials.util.options import (OptionParser, flatten_datablocks,
                                    flatten_experiments, flatten_reflections)
    usage = "%s [options] datablock.json strong.pickle" % libtbx.env.dispatcher_name

    parser = OptionParser(usage=usage,
                          phil=phil_scope,
                          read_reflections=True,
                          read_datablocks=True,
                          read_experiments=True,
                          check_format=False,
                          epilog=help_message)

    params, options = parser.parse_args(show_diff_phil=False)

    # Configure the logging
    log.config(params.verbosity,
               info=params.output.log,
               debug=params.output.debug_log)

    from dials.util.version import dials_version
    logger.info(dials_version())

    # Log the diff phil
    diff_phil = parser.diff_phil.as_str()
    if diff_phil != '':
        logger.info('The following parameters have been modified:\n')
        logger.info(diff_phil)

    datablocks = flatten_datablocks(params.input.datablock)
    experiments = flatten_experiments(params.input.experiments)
    reflections = flatten_reflections(params.input.reflections)

    if len(datablocks) == 0:
        if len(experiments) > 0:
            imagesets = experiments.imagesets()
        else:
            parser.print_help()
            return
    #elif len(datablocks) > 1:
    #raise Sorry("Only one DataBlock can be processed at a time")
    else:
        imagesets = []
        for datablock in datablocks:
            imagesets.extend(datablock.extract_imagesets())
    if len(experiments):
        known_crystal_models = experiments.crystals()
    else:
        known_crystal_models = None

    if len(reflections) == 0:
        raise Sorry("No reflection lists found in input")
    if len(reflections) > 1:
        #raise Sorry("Multiple reflections lists provided in input")
        assert len(reflections) == len(imagesets)
        from scitbx.array_family import flex
        for i in range(len(reflections)):
            reflections[i]['imageset_id'] = flex.int(len(reflections[i]), i)
            if i > 0:
                reflections[0].extend(reflections[i])

    #assert(len(reflections) == 1)
    reflections = reflections[0]

    for imageset in imagesets:
        if (imageset.get_goniometer() is not None
                and imageset.get_scan() is not None
                and imageset.get_scan().get_oscillation()[1] == 0):
            imageset.set_goniometer(None)
            imageset.set_scan(None)

    from dials.algorithms.indexing.indexer import indexer_base
    idxr = indexer_base.from_parameters(
        reflections,
        imagesets,
        known_crystal_models=known_crystal_models,
        params=params)
    refined_experiments = idxr.refined_experiments
    reflections = copy.deepcopy(idxr.refined_reflections)
    reflections.extend(idxr.unindexed_reflections)
    if len(refined_experiments):
        logger.info("Saving refined experiments to %s" %
                    params.output.experiments)
        idxr.export_as_json(refined_experiments,
                            file_name=params.output.experiments)
        logger.info("Saving refined reflections to %s" %
                    params.output.reflections)
        idxr.export_reflections(reflections,
                                file_name=params.output.reflections)

        if params.output.unindexed_reflections is not None:
            logger.info("Saving unindexed reflections to %s" %
                        params.output.unindexed_reflections)
            idxr.export_reflections(
                idxr.unindexed_reflections,
                file_name=params.output.unindexed_reflections)

    return
Example #40
def run(args):

    from dials.util.options import OptionParser
    from dials.util.options import flatten_datablocks
    import libtbx.load_env

    usage = "%s [options] datablock.json reference=reference_datablock.json" % (
        libtbx.env.dispatcher_name
    )

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_datablocks=True,
        check_format=False,
        epilog=help_message,
    )

    params, options = parser.parse_args(show_diff_phil=True)
    datablocks = flatten_datablocks(params.input.datablock)

    if len(datablocks) == 0:
        parser.print_help()
        exit()

    # Load reference geometry
    reference_detector = None
    if params.input.reference is not None:
        from dxtbx.serialize import load

        try:
            reference_experiments = load.experiment_list(
                params.input.reference, check_format=False
            )
            assert len(reference_experiments.detectors()) == 1
            reference_detector = reference_experiments.detectors()[0]
        except Exception as e:
            reference_datablocks = load.datablock(params.input.reference)
            assert len(reference_datablocks) == 1
            imageset = reference_datablocks[0].extract_imagesets()[0]
            reference_detector = imageset.get_detector()

    assert len(datablocks) == 1
    imageset = datablocks[0].extract_imagesets()[0]
    detector = imageset.get_detector()

    h = detector.hierarchy()
    href = reference_detector.hierarchy()

    assert len(h) == len(href)

    assert (
        abs(h.get_directed_distance() - href.get_directed_distance())
        < params.max_delta_distance
    )

    for panel, panel_ref in zip(h.children(), href.children()):
        panel.set_local_frame(
            panel_ref.get_local_fast_axis(),
            panel_ref.get_local_slow_axis(),
            panel_ref.get_local_origin(),
        )

    print("Writing metrology-corrected datablock to %s" % params.output.datablock)
    from dxtbx.serialize import dump

    dump.datablock(datablocks, params.output.datablock)

    return
Example #41
    def run(self):
        ''' Parse the options. '''
        from dials.util.options import flatten_experiments, flatten_datablocks, flatten_reflections
        # Parse the command line arguments
        params, options = self.parser.parse_args(show_diff_phil=True)
        self.params = params
        experiments = flatten_experiments(params.input.experiments)
        datablocks = flatten_datablocks(params.input.datablock)
        reflections = flatten_reflections(params.input.reflections)

        # Find all detector objects
        detectors = []
        detectors.extend(experiments.detectors())
        dbs = []
        for datablock in datablocks:
            dbs.extend(datablock.unique_detectors())
        detectors.extend(dbs)

        # Verify inputs
        if len(detectors) != 2:
            print "Please provide two experiments and or datablocks for comparison"
            return

        # These lines exercise the iterate_detector_at_level and iterate_panels functions
        # for a detector with 4 hierarchy levels
        """
    print "Testing iterate_detector_at_level"
    for level in xrange(4):
      print "iterating at level", level
      for panelg in iterate_detector_at_level(detectors[0].hierarchy(), 0, level):
        print panelg.get_name()

    print "Testing iterate_panels"
    for level in xrange(4):
      print "iterating at level", level
      for panelg in iterate_detector_at_level(detectors[0].hierarchy(), 0, level):
        for panel in iterate_panels(panelg):
          print panel.get_name()
    """
        tmp = []
        for refls in reflections:
            print "N reflections total:", len(refls)
            refls = refls.select(
                refls.get_flags(refls.flags.used_in_refinement))
            print "N reflections used in refinement", len(refls)
            print "Reporting only on those reflections used in refinement"

            refls['difference_vector_norms'] = (
                refls['xyzcal.mm'] - refls['xyzobs.mm.value']).norms()
            tmp.append(refls)
        reflections = tmp

        # Iterate through the detectors, computing the congruence statistics
        delta_normals = {}
        z_angles = {}
        f_deltas = {}
        s_deltas = {}
        z_deltas = {}
        o_deltas = {}  # overall
        z_offsets_d = {}
        refl_counts = {}
        all_delta_normals = flex.double()
        all_rdelta_normals = flex.double()
        all_tdelta_normals = flex.double()
        all_z_angles = flex.double()
        all_f_deltas = flex.double()
        all_s_deltas = flex.double()
        all_z_deltas = flex.double()
        all_deltas = flex.double()
        all_refls_count = flex.int()

        all_normal_angles = flex.double()
        all_rnormal_angles = flex.double()
        all_tnormal_angles = flex.double()
        pg_normal_angle_sigmas = flex.double()
        pg_rnormal_angle_sigmas = flex.double()
        pg_tnormal_angle_sigmas = flex.double()
        all_rot_z = flex.double()
        pg_rot_z_sigmas = flex.double()
        pg_bc_dists = flex.double()
        all_bc_dist = flex.double()
        all_f_offsets = flex.double()
        all_s_offsets = flex.double()
        all_z_offsets = flex.double()
        pg_f_offset_sigmas = flex.double()
        pg_s_offset_sigmas = flex.double()
        pg_z_offset_sigmas = flex.double()
        pg_offset_sigmas = flex.double()
        all_weights = flex.double()

        congruence_table_data = []
        detector_table_data = []
        rmsds_table_data = []
        root1 = detectors[0].hierarchy()
        root2 = detectors[1].hierarchy()

        s0 = col(
            flex.vec3_double([col(b.get_s0())
                              for b in experiments.beams()]).mean())
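        # mean s0 over all input experiments; used later when computing
        # panel-group beam centres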

        # Compute a set of radial and transverse displacements for each reflection
        print "Setting up stats..."
        tmp_refls = []
        for refls, expts in zip(
                reflections,
            [wrapper.data for wrapper in params.input.experiments]):
            tmp = flex.reflection_table()
            assert len(expts.detectors()) == 1
            dect = expts.detectors()[0]
            # Need to construct a variety of vectors
            for panel_id, panel in enumerate(dect):
                panel_refls = refls.select(refls['panel'] == panel_id)
                bcl = flex.vec3_double()
                # Compute the beam center in lab space (a vector pointing from the origin to where the beam would intersect
                # the panel, if it did intersect the panel)
                for expt_id in set(panel_refls['id']):
                    beam = expts[expt_id].beam
                    # use a distinct name so the mean s0 computed above is not
                    # clobbered by the per-experiment beam vector
                    expt_s0 = beam.get_s0()
                    expt_refls = panel_refls.select(
                        panel_refls['id'] == expt_id)
                    beam_centre = panel.get_beam_centre_lab(expt_s0)
                    bcl.extend(flex.vec3_double(len(expt_refls), beam_centre))
                panel_refls['beam_centre_lab'] = bcl

                # Compute obs in lab space
                x, y, _ = panel_refls['xyzobs.mm.value'].parts()
                c = flex.vec2_double(x, y)
                panel_refls['obs_lab_coords'] = panel.get_lab_coord(c)
                # Compute deltaXY in panel space. This vector is relative to the panel origin
                x, y, _ = (panel_refls['xyzcal.mm'] -
                           panel_refls['xyzobs.mm.value']).parts()
                # Convert deltaXY to lab space, subtracting off of the panel origin
                panel_refls['delta_lab_coords'] = panel.get_lab_coord(
                    flex.vec2_double(x, y)) - panel.get_origin()
                tmp.extend(panel_refls)
            refls = tmp
            # The radial vector points from the center of the reflection to the beam center
            radial_vectors = (refls['obs_lab_coords'] -
                              refls['beam_centre_lab']).each_normalize()
            # The transverse vector is orthogonal to the radial vector and the beam vector
            transverse_vectors = radial_vectors.cross(
                refls['beam_centre_lab']).each_normalize()
            # Compute the radial and transverse components of each deltaXY
            refls['radial_displacements'] = refls['delta_lab_coords'].dot(
                radial_vectors)
            refls['transverse_displacements'] = refls['delta_lab_coords'].dot(
                transverse_vectors)

            tmp_refls.append(refls)
        reflections = tmp_refls

        for pg_id, (pg1, pg2) in enumerate(
                zip(
                    iterate_detector_at_level(root1, 0,
                                              params.hierarchy_level),
                    iterate_detector_at_level(root2, 0,
                                              params.hierarchy_level))):
            """ First compute statistics for detector congruence """
            # Count up the number of reflections in this panel group pair for use as a weighting scheme
            total_refls = 0
            pg1_refls = 0
            pg2_refls = 0
            for p1, p2 in zip(iterate_panels(pg1), iterate_panels(pg2)):
                r1 = len(reflections[0].select(
                    reflections[0]['panel'] == id_from_name(
                        detectors[0], p1.get_name())))
                r2 = len(reflections[1].select(
                    reflections[1]['panel'] == id_from_name(
                        detectors[1], p2.get_name())))
                total_refls += r1 + r2
                pg1_refls += r1
                pg2_refls += r2
            if pg1_refls == 0 and pg2_refls == 0:
                print "No reflections on panel group", pg_id
                continue

            assert pg1.get_name() == pg2.get_name()
            refl_counts[pg1.get_name()] = total_refls

            row = ["%d" % pg_id]
            for pg, refls, det in zip([pg1, pg2], reflections, detectors):
                pg_refls = flex.reflection_table()
                for p in iterate_panels(pg):
                    pg_refls.extend(
                        refls.select(
                            refls['panel'] == id_from_name(det, p.get_name())))
                if len(pg_refls) == 0:
                    rmsd = r_rmsd = t_rmsd = 0
                else:
                    rmsd = math.sqrt(
                        flex.sum_sq(pg_refls['difference_vector_norms']) /
                        len(pg_refls)) * 1000
                    r_rmsd = math.sqrt(
                        flex.sum_sq(pg_refls['radial_displacements']) /
                        len(pg_refls)) * 1000
                    t_rmsd = math.sqrt(
                        flex.sum_sq(pg_refls['transverse_displacements']) /
                        len(pg_refls)) * 1000

                row.extend([
                    "%6.1f" % rmsd,
                    "%6.1f" % r_rmsd,
                    "%6.1f" % t_rmsd,
                    "%8d" % len(pg_refls)
                ])
            rmsds_table_data.append(row)

            # Angle between normals of pg1 and pg2
            delta_norm_angle = col(pg1.get_normal()).angle(col(
                pg2.get_normal()),
                                                           deg=True)
            all_delta_normals.append(delta_norm_angle)

            # compute radial and transverse components of the delta between normal angles
            pgo = (get_center(pg1) + get_center(pg2)) / 2
            ro = (get_center(root1) + get_center(root2)) / 2
            rn = (col(root1.get_normal()) + col(root2.get_normal())) / 2
            rf = (col(root1.get_fast_axis()) + col(root2.get_fast_axis())) / 2
            rs = (col(root1.get_slow_axis()) + col(root2.get_slow_axis())) / 2

            ro_pgo = pgo - ro  # vector from the detector origin to the average panel group origin
            if ro_pgo.length() == 0:
                radial = col((0, 0, 0))
                transverse = col((0, 0, 0))
            else:
                radial = ((rf.dot(ro_pgo) * rf) + (rs.dot(ro_pgo) * rs)
                          ).normalize()  # component of ro_pgo in rf rs plane
                transverse = rn.cross(radial).normalize()
            # now radial and transverse are vectors orthogonal to each other and the detector normal, such that
            # radial points at the panel group origin
            # v1 and v2 are the components of pg 1 and 2 normals in the rn radial plane
            v1 = (radial.dot(col(pg1.get_normal())) *
                  radial) + (rn.dot(col(pg1.get_normal())) * rn)
            v2 = (radial.dot(col(pg2.get_normal())) *
                  radial) + (rn.dot(col(pg2.get_normal())) * rn)
            rdelta_norm_angle = v1.angle(v2, deg=True)
            if v1.cross(v2).dot(transverse) < 0:
                rdelta_norm_angle = -rdelta_norm_angle
            all_rdelta_normals.append(rdelta_norm_angle)
            # v1 and v2 are the components of pg 1 and 2 normals in the rn transverse plane
            v1 = (transverse.dot(col(pg1.get_normal())) *
                  transverse) + (rn.dot(col(pg1.get_normal())) * rn)
            v2 = (transverse.dot(col(pg2.get_normal())) *
                  transverse) + (rn.dot(col(pg2.get_normal())) * rn)
            tdelta_norm_angle = v1.angle(v2, deg=True)
            if v1.cross(v2).dot(radial) < 0:
                tdelta_norm_angle = -tdelta_norm_angle
            all_tdelta_normals.append(tdelta_norm_angle)

            # compute the angle between fast axes of these panel groups
            z_angle = col(pg1.get_fast_axis()[0:2]).angle(col(
                pg2.get_fast_axis()[0:2]),
                                                          deg=True)
            all_z_angles.append(z_angle)
            z_angles[pg1.get_name()] = z_angle

            all_refls_count.append(total_refls)
            all_weights.append(pg1_refls)
            all_weights.append(pg2_refls)
            """ Now compute statistics measuring the reality of the detector. For example, instead of the distance between two things,
      we are concerned with the location of those things relative to laboratory space """
            # Compute distances between panel groups and beam center
            # Also compute offset along Z axis
            dists = flex.double()
            f_offsets = flex.double()
            s_offsets = flex.double()
            z_offsets = flex.double()
            for pg, r in zip([pg1, pg2], [root1, root2]):
                bc = col(pg.get_beam_centre_lab(s0))
                ori = get_center(pg)

                dists.append((ori - bc).length())

                rori = col(r.get_origin())
                delta_ori = ori - rori
                r_norm = col(r.get_normal())
                r_fast = col(r.get_fast_axis())
                r_slow = col(r.get_slow_axis())
                f_offsets.append(r_fast.dot(delta_ori) * 1000)
                s_offsets.append(r_slow.dot(delta_ori) * 1000)
                z_offsets.append(r_norm.dot(delta_ori) * 1000)

            fd = abs(f_offsets[0] - f_offsets[1])
            sd = abs(s_offsets[0] - s_offsets[1])
            zd = abs(z_offsets[0] - z_offsets[1])
            od = math.sqrt(fd**2 + sd**2 + zd**2)
            f_deltas[pg1.get_name()] = fd
            s_deltas[pg1.get_name()] = sd
            z_deltas[pg1.get_name()] = zd
            o_deltas[pg1.get_name()] = od
            all_f_deltas.append(fd)
            all_s_deltas.append(sd)
            all_z_deltas.append(zd)
            all_deltas.append(od)

            all_f_offsets.extend(f_offsets)
            all_s_offsets.extend(s_offsets)
            all_z_offsets.extend(z_offsets)

            # Compute angle between detector normal and panel group normal
            # Compute rotation of panel group around detector normal
            pg_rotz = flex.double()
            norm_angles = flex.double()
            rnorm_angles = flex.double()
            tnorm_angles = flex.double()
            for pg, r in zip([pg1, pg2], [root1, root2]):

                pgo = get_center(pg)
                pgn = col(pg.get_normal())
                pgf = col(pg.get_fast_axis())

                ro = get_center(r)
                rn = col(r.get_normal())
                rf = col(r.get_fast_axis())
                rs = col(r.get_slow_axis())

                norm_angle = rn.angle(pgn, deg=True)
                norm_angles.append(norm_angle)
                all_normal_angles.append(norm_angle)

                ro_pgo = pgo - ro  # vector from the detector origin to the panel group origin
                if ro_pgo.length() == 0:
                    radial = col((0, 0, 0))
                    transverse = col((0, 0, 0))
                else:
                    radial = (
                        (rf.dot(ro_pgo) * rf) + (rs.dot(ro_pgo) * rs)
                    ).normalize()  # component of ro_pgo in rf rs plane
                    transverse = rn.cross(radial).normalize()
                # now radial and transverse are vectors orthogonal to each other and the detector normal, such that
                # radial points at the panel group origin
                # v is the component of pgn in the rn radial plane
                v = (radial.dot(pgn) * radial) + (rn.dot(pgn) * rn)
                angle = rn.angle(v, deg=True)
                if rn.cross(v).dot(transverse) < 0:
                    angle = -angle
                rnorm_angles.append(angle)
                all_rnormal_angles.append(angle)
                # v is the component of pgn in the rn transverse plane
                v = (transverse.dot(pgn) * transverse) + (rn.dot(pgn) * rn)
                angle = rn.angle(v, deg=True)
                if rn.cross(v).dot(radial) < 0:
                    angle = -angle
                tnorm_angles.append(angle)
                all_tnormal_angles.append(angle)

                # v is the component of pgf in the rf rs plane
                v = (rf.dot(pgf) * rf) + (rs.dot(pgf) * rs)
                angle = rf.angle(v, deg=True)
                angle = angle - (round(angle / 90) * 90
                                 )  # deviation from 90 degrees
                pg_rotz.append(angle)
                all_rot_z.append(angle)

            # Set up table rows using stats aggregated from above
            pg_weights = flex.double([pg1_refls, pg2_refls])
            if 0 in pg_weights:
                dist_m = dist_s = norm_angle_m = norm_angle_s = rnorm_angle_m = rnorm_angle_s = 0
                tnorm_angle_m = tnorm_angle_s = rotz_m = rotz_s = 0
                fo_m = fo_s = so_m = so_s = zo_m = zo_s = o_s = 0

            else:
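                # weighted mean and weighted standard deviation
                # (gsl_stats_wsd), weighted by the per-group reflection counts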
                stats = flex.mean_and_variance(dists, pg_weights)
                dist_m = stats.mean()
                dist_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(norm_angles, pg_weights)
                norm_angle_m = stats.mean()
                norm_angle_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(rnorm_angles, pg_weights)
                rnorm_angle_m = stats.mean()
                rnorm_angle_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(tnorm_angles, pg_weights)
                tnorm_angle_m = stats.mean()
                tnorm_angle_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(pg_rotz, pg_weights)
                rotz_m = stats.mean()
                rotz_s = stats.gsl_stats_wsd()

                stats = flex.mean_and_variance(f_offsets, pg_weights)
                fo_m = stats.mean()
                fo_s = stats.gsl_stats_wsd()
                stats = flex.mean_and_variance(s_offsets, pg_weights)
                so_m = stats.mean()
                so_s = stats.gsl_stats_wsd()
                stats = flex.mean_and_variance(z_offsets, pg_weights)
                zo_m = stats.mean()
                zo_s = stats.gsl_stats_wsd()

                o_s = math.sqrt(fo_s**2 + so_s**2 + zo_s**2)

            pg_bc_dists.append(dist_m)
            all_bc_dist.extend(dists)
            pg_normal_angle_sigmas.append(norm_angle_s)
            pg_rnormal_angle_sigmas.append(rnorm_angle_s)
            pg_tnormal_angle_sigmas.append(tnorm_angle_s)
            pg_rot_z_sigmas.append(rotz_s)
            pg_f_offset_sigmas.append(fo_s)
            pg_s_offset_sigmas.append(so_s)
            pg_z_offset_sigmas.append(zo_s)
            pg_offset_sigmas.append(o_s)
            z_offsets_d[pg1.get_name()] = zo_m

            congruence_table_data.append([
                "%d" % pg_id,
                "%5.1f" % dist_m,  #"%.4f"%dist_s,
                "%.4f" % delta_norm_angle,
                "%.4f" % rdelta_norm_angle,
                "%.4f" % tdelta_norm_angle,
                "%.4f" % z_angle,
                "%4.1f" % fd,
                "%4.1f" % sd,
                "%4.1f" % zd,
                "%4.1f" % od,
                "%6d" % total_refls
            ])
            detector_table_data.append([
                "%d" % pg_id,
                "%5.1f" % dist_m,  #"%.4f"%dist_s,
                "%.4f" % norm_angle_m,
                "%.4f" % norm_angle_s,
                "%.4f" % rnorm_angle_m,
                "%.4f" % rnorm_angle_s,
                "%.4f" % tnorm_angle_m,
                "%.4f" % tnorm_angle_s,
                "%10.6f" % rotz_m,
                "%.6f" % rotz_s,
                #"%9.1f"%fo_m, "%5.3f"%fo_s,
                #"%9.1f"%so_m, "%5.3f"%so_s,
                "%9.3f" % fo_s,
                "%9.3f" % so_s,
                "%9.1f" % zo_m,
                "%9.1f" % zo_s,
                "%9.3f" % o_s,
                "%6d" % total_refls
            ])

        # Set up table output
        table_d = {
            d: row
            for d, row in zip(pg_bc_dists, congruence_table_data)
        }
        table_header = [
            "PanelG", "Dist", "Normal", "RNormal", "TNormal", "Z rot", "Delta",
            "Delta", "Delta", "Delta", "N"
        ]
        table_header2 = [
            "Id", "", "Angle", "Angle", "Angle", "Angle", "F", "S", "Z", "O",
            "Refls"
        ]
        table_header3 = [
            "", "(mm)", "(mm)", "(deg)", "(deg)", "(microns)", "(microns)",
            "(microns)", "(microns)", "(microns)", ""
        ]
        congruence_table_data = [table_header, table_header2, table_header3]
        congruence_table_data.extend([table_d[key] for key in sorted(table_d)])

        table_d = {d: row for d, row in zip(pg_bc_dists, detector_table_data)}
        table_header = [
            "PanelG", "Dist", "Normal", "Normal", "RNormal", "RNormal",
            "TNormal", "TNormal", "RotZ", "RotZ", "F Offset", "S Offset",
            "Z Offset", "Z Offset", "Offset", "N"
        ]
        table_header2 = [
            "Id", "", "", "Sigma", "", "Sigma", "", "Sigma", "", "Sigma",
            "Sigma", "Sigma", "", "Sigma", "Sigma", "Refls"
        ]
        table_header3 = [
            "", "(mm)", "(deg)", "(deg)", "(deg)", "(deg)", "(deg)", "(deg)",
            "(deg)", "(deg)", "(microns)", "(microns)", "(microns)",
            "(microns)", "(microns)", ""
        ]
        detector_table_data = [table_header, table_header2, table_header3]
        detector_table_data.extend([table_d[key] for key in sorted(table_d)])

        table_d = {d: row for d, row in zip(pg_bc_dists, rmsds_table_data)}
        table_header = ["PanelG"]
        table_header2 = ["Id"]
        table_header3 = [""]
        for i in xrange(len(detectors)):
            table_header.extend(["D%d" % i] * 4)
            table_header2.extend(["RMSD", "rRMSD", "tRMSD", "N refls"])
            table_header3.extend(["(microns)"] * 3)
            table_header3.append("")
        rmsds_table_data = [table_header, table_header2, table_header3]
        rmsds_table_data.extend([table_d[key] for key in sorted(table_d)])

        if len(all_refls_count) > 1:
            r1 = ["Weighted mean"]
            r2 = ["Weighted stddev"]
            r1.append("")
            r2.append("")
            #r1.append("")
            #r2.append("")
            stats = flex.mean_and_variance(all_delta_normals,
                                           all_refls_count.as_double())
            r1.append("%.4f" % stats.mean())
            r2.append("%.4f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_rdelta_normals,
                                           all_refls_count.as_double())
            r1.append("%.4f" % stats.mean())
            r2.append("%.4f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_tdelta_normals,
                                           all_refls_count.as_double())
            r1.append("%.4f" % stats.mean())
            r2.append("%.4f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_z_angles,
                                           all_refls_count.as_double())
            r1.append("%.4f" % stats.mean())
            r2.append("%.4f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_f_deltas,
                                           all_refls_count.as_double())
            r1.append("%4.1f" % stats.mean())
            r2.append("%4.1f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_s_deltas,
                                           all_refls_count.as_double())
            r1.append("%4.1f" % stats.mean())
            r2.append("%4.1f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_z_deltas,
                                           all_refls_count.as_double())
            r1.append("%4.1f" % stats.mean())
            r2.append("%4.1f" % stats.gsl_stats_wsd())
            stats = flex.mean_and_variance(all_deltas,
                                           all_refls_count.as_double())
            r1.append("%4.1f" % stats.mean())
            r2.append("%4.1f" % stats.gsl_stats_wsd())
            r1.append("")
            r2.append("")
            congruence_table_data.append(r1)
            congruence_table_data.append(r2)
            congruence_table_data.append([
                "Mean", "", "", "", "", "", "", "", "", "", "",
                "%6.1f" % flex.mean(all_refls_count.as_double())
            ])

        from libtbx import table_utils
        print "Congruence statistics, I.E. the differences between the input detectors:"
        print table_utils.format(congruence_table_data,
                                 has_header=3,
                                 justify='center',
                                 delim=" ")

        print "PanelG Id: panel group id or panel id, depending on hierarchy_level. For each panel group, statistics are computed between the matching panel groups between the two input experiments."
        print "Dist: distance from center of panel group to the beam center"
        print "Dist Sigma: weighted standard deviation of the measurements used to compute Dist"
        print "Normal angle: angle between the normal vectors of matching panel groups."
        print "RNormal angle: radial component of the angle between the normal vectors of matching panel groups"
        print "TNormal angle: transverse component of the angle between the normal vectors of matching panel groups"
        print "Z rot: angle between the XY components of the fast axes of the panel groups."
        print "Delta F: shift between matching panel groups along the detector fast axis."
        print "Delta S: shift between matching panel groups along the detector slow axis."
        print "Delta Z: Z shift between matching panel groups along the detector normal."
        print "Delta O: Overall shift between matching panel groups along the detector normal."
        print "N refls: number of reflections summed between both matching panel groups. This number is used as a weight when computing means and standard deviations."
        print
        print

        if len(all_weights) > 1:
            r1 = ["All"]
            r2 = ["Mean"]
            for data, weights, fmt in [
                [None, None, None],
                    #[None,None,None],
                [all_normal_angles,
                 all_weights.as_double(), "%.4f"],
                [pg_normal_angle_sigmas,
                 all_refls_count.as_double(), "%.4f"],
                [all_rnormal_angles,
                 all_weights.as_double(), "%.4f"],
                [pg_rnormal_angle_sigmas,
                 all_refls_count.as_double(), "%.4f"],
                [all_tnormal_angles,
                 all_weights.as_double(), "%.4f"],
                [pg_tnormal_angle_sigmas,
                 all_refls_count.as_double(), "%.4f"],
                [all_rot_z, all_weights.as_double(), "%10.6f"],
                [pg_rot_z_sigmas,
                 all_refls_count.as_double(), "%.6f"],
                    #[all_f_offsets,           all_weights.as_double(),     "%9.1f"],
                [pg_f_offset_sigmas,
                 all_refls_count.as_double(), "%9.3f"],
                    #[all_s_offsets,           all_weights.as_double(),     "%9.1f"],
                [pg_s_offset_sigmas,
                 all_refls_count.as_double(), "%9.3f"],
                [all_z_offsets,
                 all_weights.as_double(), "%9.1f"],
                [pg_z_offset_sigmas,
                 all_refls_count.as_double(), "%9.1f"],
                [pg_offset_sigmas,
                 all_refls_count.as_double(), "%9.1f"]
            ]:

                r2.append("")
                if data is None and weights is None:
                    r1.append("")
                    continue
                stats = flex.mean_and_variance(data, weights)
                r1.append(fmt % stats.mean())

            r1.append("")
            r2.append("%6.1f" % flex.mean(all_refls_count.as_double()))
            detector_table_data.append(r1)
            detector_table_data.append(r2)

        print "Detector statistics, I.E. measurements of parameters relative to the detector plane:"
        print table_utils.format(detector_table_data,
                                 has_header=3,
                                 justify='center',
                                 delim=" ")

        print "PanelG Id: panel group id or panel id, depending on hierarchy_level. For each panel group, weighted means and weighted standard deviations (Sigmas) for the properties listed below are computed using the matching panel groups between the input experiments."
        print "Dist: distance from center of panel group to the beam center"
        print "Dist Sigma: weighted standard deviation of the measurements used to compute Dist"
        print "Normal Angle: angle between the normal vector of the detector at its root hierarchy level and the normal of the panel group"
        print "RNormal Angle: radial component of Normal Angle"
        print "TNormal Angle: transverse component of Normal Angle"
        print "RotZ: deviation from 90 degrees of the rotation of each panel group around the detector normal"
        print "F Offset: offset of panel group along the detector's fast axis"
        print "S Offset: offset of panel group along the detector's slow axis"
        print "Z Offset: offset of panel group along the detector normal"
        print "Offset: offset of panel group in F,S,Z space. Sigma is F, S, Z offset sigmas summed in quadrature."
        print "N refls: number of reflections summed between both matching panel groups. This number is used as a weight when computing means and standard deviations."
        print "All: weighted mean of the values shown"
        print
        print "Sigmas in this table are computed using the standard deviation of 2 measurements (I.E. a panel's Z Offset is measured twice, once in each input dataset). This is related by a factor of sqrt(2)/2 to the mean of the Delta Z parameter in the congruence statistics table above, which is the difference between Z parameters."
        print

        row = ["Overall"]
        for refls in reflections:
            row.append("%6.1f" % (math.sqrt(
                flex.sum_sq(refls['difference_vector_norms']) / len(refls)) *
                                  1000))
            row.append("%6.1f" % (math.sqrt(
                flex.sum_sq(refls['radial_displacements']) / len(refls)) *
                                  1000))
            row.append("%6.1f" % (math.sqrt(
                flex.sum_sq(refls['transverse_displacements']) / len(refls)) *
                                  1000))
            row.append("%8d" % len(refls))
        rmsds_table_data.append(row)

        print "RMSDs by detector number"
        print table_utils.format(rmsds_table_data,
                                 has_header=3,
                                 justify='center',
                                 delim=" ")
        print "PanelG Id: panel group id or panel id, depending on hierarchy_level"
        print "RMSD: root mean squared deviation between observed and predicted spot locations"
        print "rRMSD: RMSD of radial components of the observed-predicted vectors"
        print "tRMSD: RMSD of transverse components of the observed-predicted vectors"
        print "N refls: number of reflections"

        # Show stats for detector hierarchy root
        def _print_vector(v):
            for i in v:
                print "%10.5f" % i,
            print

        for d_id, d in enumerate(detectors):
            ori = d.hierarchy().get_origin()
            norm = d.hierarchy().get_normal()
            fast = d.hierarchy().get_fast_axis()
            slow = d.hierarchy().get_slow_axis()
            print "Detector", d_id, "origin:   ",
            _print_vector(ori)
            print "Detector", d_id, "normal:   ",
            _print_vector(norm)
            print "Detector", d_id, "fast axis:",
            _print_vector(fast)
            print "Detector", d_id, "slow axis:",
            _print_vector(slow)

        # Unit cell statistics
        lengths = flex.vec3_double()
        angles = flex.vec3_double()
        weights = flex.double()
        for refls, expts in zip(reflections,
                                [d.data for d in params.input.experiments]):
            for crystal_id, crystal in enumerate(expts.crystals()):
                lengths.append(crystal.get_unit_cell().parameters()[0:3])
                angles.append(crystal.get_unit_cell().parameters()[3:6])
                weights.append(len(refls.select(refls['id'] == crystal_id)))

        print "Unit cell stats (angstroms and degrees), weighted means and standard deviations"
        for subset, tags in zip([lengths, angles],
                                [["Cell a", "Cell b", "Cell c"],
                                 ["Cell alpha", "Cell beta", "Cell gamma"]]):
            for data, tag in zip(subset.parts(), tags):
                stats = flex.mean_and_variance(data, weights)
                print "%s %5.1f +/- %6.3f" % (tag, stats.mean(),
                                              stats.gsl_stats_wsd())

        if params.tag is None:
            tag = ""
        else:
            tag = "%s " % params.tag

        if params.show_plots:
            # Plot the results
            detector_plot_dict(self.params,
                               detectors[0],
                               refl_counts,
                               u"%sN reflections" % tag,
                               u"%6d",
                               show=False)
            #detector_plot_dict(self.params, detectors[0], delta_normals, u"%sAngle between normal vectors (\N{DEGREE SIGN})"%tag, u"%.2f\N{DEGREE SIGN}", show=False)
            detector_plot_dict(
                self.params,
                detectors[0],
                z_angles,
                u"%sZ rotation angle between panels (\N{DEGREE SIGN})" % tag,
                u"%.2f\N{DEGREE SIGN}",
                show=False)
            detector_plot_dict(
                self.params,
                detectors[0],
                f_deltas,
                u"%sFast displacements between panels (microns)" % tag,
                u"%4.1f",
                show=False)
            detector_plot_dict(
                self.params,
                detectors[0],
                s_deltas,
                u"%sSlow displacements between panels (microns)" % tag,
                u"%4.1f",
                show=False)
            detector_plot_dict(self.params,
                               detectors[0],
                               z_offsets_d,
                               u"%sZ offsets along detector normal (microns)" %
                               tag,
                               u"%4.1f",
                               show=False)
            detector_plot_dict(self.params,
                               detectors[0],
                               z_deltas,
                               u"%sZ displacements between panels (microns)" %
                               tag,
                               u"%4.1f",
                               show=False)
            detector_plot_dict(
                self.params,
                detectors[0],
                o_deltas,
                u"%sOverall displacements between panels (microns)" % tag,
                u"%4.1f",
                show=False)
            plt.show()
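All of the weighted means and weighted standard deviations in the tables above come from flex.mean_and_variance, with per-group reflection counts as weights. A minimal, self-contained sketch of that pattern (the offsets and counts below are invented for illustration; assumes a cctbx/scitbx installation):

from scitbx.array_family import flex

# hypothetical per-panel-group Z offsets (microns) and reflection counts
offsets = flex.double([1.2, 0.8, 1.5])
counts = flex.double([120, 80, 200])

stats = flex.mean_and_variance(offsets, counts)
print("weighted mean:   %.4f" % stats.mean())           # reflections-weighted mean
print("weighted stddev: %.4f" % stats.gsl_stats_wsd())  # weighted standard deviation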
    def run(self):
        ''' Parse the options and compare the two detectors. '''
        from dials.util.options import flatten_experiments, flatten_datablocks, flatten_reflections
        # Parse the command line arguments
        params, options = self.parser.parse_args(show_diff_phil=True)
        self.params = params
        experiments = flatten_experiments(params.input.experiments)
        datablocks = flatten_datablocks(params.input.datablock)
        reflections = flatten_reflections(params.input.reflections)

        # Find all detector objects
        detectors = []
        detectors.extend(experiments.detectors())
        dbs = []
        for datablock in datablocks:
            dbs.extend(datablock.unique_detectors())
        detectors.extend(dbs)

        # Verify inputs
        if len(detectors) != 2:
            raise Sorry(
                "Please provide a reference and a moving set of experiments and or datablocks"
            )

        reflections = reflections[1]
        detector = detectors[1]

        if not hasattr(detector, 'hierarchy'):
            raise Sorry("Script intended for hierarchical detectors")

        if params.max_hierarchy_level is None or str(
                params.max_hierarchy_level).lower() == 'auto':
            params.max_hierarchy_level = 0
            root = detector.hierarchy()
            while root.is_group():
                root = root[0]
                params.max_hierarchy_level += 1
            print "Found", params.max_hierarchy_level + 1, "hierarchy levels"

        reference_root = detectors[0].hierarchy()
        moving_root = detector.hierarchy()
        rori = get_center(reference_root)
        rf = col(reference_root.get_fast_axis())
        rs = col(reference_root.get_slow_axis())
        r_norm = col(reference_root.get_normal())
        s0 = col(
            flex.vec3_double([col(b.get_s0())
                              for b in experiments.beams()]).mean())

        summary_table_header = [
            "Hierarchy", "Delta XY", "Delta XY", "R Offsets", "R Offsets",
            "T Offsets", "T Offsets", "Z Offsets", "Z Offsets", "dR Norm",
            "dR Norm", "dT Norm", "dT Norm", "Local dNorm", "Local dNorm",
            "Rot Z", "Rot Z"
        ]
        summary_table_header2 = [
            "Level", "", "Sigma", "", "Sigma", "", "Sigma", "", "Sigma", "",
            "Sigma", "", "Sigma", "", "Sigma", "", "Sigma"
        ]
        summary_table_header3 = [
            "", "(microns)", "(microns)", "(microns)", "(microns)",
            "(microns)", "(microns)", "(microns)", "(microns)", "(deg)",
            "(deg)", "(deg)", "(deg)", "(deg)", "(deg)", "(deg)", "(deg)"
        ]
        summary_table_data = []
        summary_table_data.append(summary_table_header)
        summary_table_data.append(summary_table_header2)
        summary_table_data.append(summary_table_header3)

        table_header = [
            "PanelG", "BC dist", "Delta XY", "R Offsets", "T Offsets",
            "Z Offsets", "dR Norm", "dT Norm", "Local dNorm", "Rot Z",
            "N Refls"
        ]
        table_header2 = [
            "ID", "(mm)", "(microns)", "(microns)", "(microns)", "(microns)",
            "(deg)", "(deg)", "(deg)", "(deg)", ""
        ]

        from xfel.cftbx.detector.cspad_cbf_tbx import basis

        def get_full_basis_shift(pg):
            """Compute basis shift from pg to lab space"""
            shift = basis(panelgroup=pg)
            while True:
                parent = pg.parent()
                if parent is None:
                    break
                shift = basis(panelgroup=parent) * shift
                pg = parent
            return shift

        # Iterate through the hierarchy levels
        for level in range(params.max_hierarchy_level + 1):
            delta_xy = flex.double()
            r_offsets = flex.double()
            t_offsets = flex.double()
            z_offsets = flex.double()
            rot_z = flex.double()
            delta_r_norm = flex.double()
            delta_t_norm = flex.double()
            local_dnorm = flex.double()
            bc_dists = flex.double()
            weights = flex.double()

            rows = []

            for pg_id, (pg1, pg2) in enumerate(
                    zip(iterate_detector_at_level(reference_root, 0, level),
                        iterate_detector_at_level(moving_root, 0, level))):
                weight = 0
                for panel_id, p in enumerate(iterate_panels(pg2)):
                    weight += len(
                        reflections.select(
                            reflections['panel'] == id_from_name(
                                detector, p.get_name())))
                weights.append(weight)

                bc = col(pg1.get_beam_centre_lab(s0))
                ori = get_center(pg1)
                bc_dist = (ori - bc).length()
                bc_dists.append(bc_dist)

                z_dists = []
                ori_xy = []
                for pg in [pg1, pg2]:
                    ori = pg.get_local_origin()
                    ori_xy.append(col((ori[0], ori[1])))
                    z_dists.append(ori[2] * 1000)
                dxy = (ori_xy[1] - ori_xy[0]).length() * 1000
                delta_xy.append(dxy)

                z_off = z_dists[1] - z_dists[0]
                z_offsets.append(z_off)

                pgo1 = col(pg1.get_origin())
                ro_pgo = pgo1 - rori  # vector from the detector origin to the panel group origin
                if ro_pgo.length() == 0:
                    radial = col((0, 0, 0))
                    transverse = col((0, 0, 0))
                else:
                    radial = (
                        (rf.dot(ro_pgo) * rf) + (rs.dot(ro_pgo) * rs)
                    ).normalize()  # component of ro_pgo in rf rs plane
                    transverse = r_norm.cross(radial).normalize()
                # now radial and transverse are vectors orthogonal to each other and the detector normal, such that
                # radial points at the panel group origin

                # compute shift in local frame, then convert that shift to lab space, then make it relative to the reference's origin, in lab space
                lpgo1 = col(pg1.get_local_origin())
                lpgo2 = col(pg2.get_local_origin())
                delta_pgo = (get_full_basis_shift(pg1) *
                             (lpgo2 - lpgo1)) - pgo1

                # v is the component of delta_pgo along the radial vector
                v = (radial.dot(delta_pgo) * radial)
                r_offset = v.length() * 1000
                if r_norm.cross(v).dot(transverse) < 0:
                    r_offset = -r_offset
                r_offsets.append(r_offset)
                # v is the component of delta_pgo along the transverse vector
                v = (transverse.dot(delta_pgo) * transverse)
                t_offset = v.length() * 1000
                if r_norm.cross(v).dot(radial) < 0:
                    t_offset = -t_offset
                t_offsets.append(t_offset)

                pgn1 = col(pg1.get_normal())
                pgf1 = col(pg1.get_fast_axis())
                pgs1 = col(pg1.get_slow_axis())
                pgn2 = col(pg2.get_normal())
                pgf2 = col(pg2.get_fast_axis())

                # v1 and v2 are the component of pgf1 and pgf2 in the rf rs plane
                v1 = (rf.dot(pgf1) * rf) + (rs.dot(pgf1) * rs)
                v2 = (rf.dot(pgf2) * rf) + (rs.dot(pgf2) * rs)
                rz = v1.angle(v2, deg=True)
                rot_z.append(rz)

                # v1 and v2 are the components of pgn1 and pgn2 in the r_norm radial plane
                v1 = (r_norm.dot(pgn1) * r_norm) + (radial.dot(pgn1) * radial)
                v2 = (r_norm.dot(pgn2) * r_norm) + (radial.dot(pgn2) * radial)
                drn = v1.angle(v2, deg=True)
                if v2.cross(v1).dot(transverse) < 0:
                    drn = -drn
                delta_r_norm.append(drn)

                # v1 and v2 are the components of pgn1 and pgn2 in the r_norm transverse plane
                v1 = (r_norm.dot(pgn1) * r_norm) + (transverse.dot(pgn1) *
                                                    transverse)
                v2 = (r_norm.dot(pgn2) * r_norm) + (transverse.dot(pgn2) *
                                                    transverse)
                dtn = v1.angle(v2, deg=True)
                if v2.cross(v1).dot(radial) < 0:
                    dtn = -dtn
                delta_t_norm.append(dtn)

                # Determine angle between normals in local space
                lpgf1 = col(pg1.get_local_fast_axis())
                lpgs1 = col(pg1.get_local_slow_axis())
                lpgn1 = lpgf1.cross(lpgs1)
                lpgf2 = col(pg2.get_local_fast_axis())
                lpgs2 = col(pg2.get_local_slow_axis())
                lpgn2 = lpgf2.cross(lpgs2)
                ldn = lpgn1.angle(lpgn2, deg=True)
                local_dnorm.append(ldn)

                row = [
                    "%3d" % pg_id,
                    "%6.1f" % bc_dist,
                    "%6.1f" % dxy,
                    "%6.1f" % r_offset,
                    "%6.1f" % t_offset,
                    "%6.1f" % z_off,
                    "%.4f" % drn,
                    "%.4f" % dtn,
                    "%.4f" % ldn,
                    "%.4f" % rz,
                    "%8d" % weight
                ]
                rows.append(row)

            wm_row = ["Weighted mean", ""]
            ws_row = ["Weighted stddev", ""]
            s_row = ["%d" % level]
            iterable = zip([
                delta_xy, r_offsets, t_offsets, z_offsets, delta_r_norm,
                delta_t_norm, local_dnorm, rot_z
            ], [
                "%6.1f", "%6.1f", "%6.1f", "%6.1f", "%.4f", "%.4f", "%.4f",
                "%.4f"
            ])
            if len(z_offsets) == 0:
                wm_row.extend(["%6.1f" % 0] * 8)
                ws_row.extend(["%6.1f" % 0] * 8)
                s_row.extend(["%6.1f" % 0] * 8)
            elif len(z_offsets) == 1:
                for data, fmt in iterable:
                    wm_row.append(fmt % data[0])
                    ws_row.append(fmt % 0)
                    s_row.append(fmt % data[0])
                    s_row.append(fmt % 0)
            else:
                for data, fmt in iterable:
                    stats = flex.mean_and_variance(data, weights)
                    wm_row.append(fmt % stats.mean())
                    ws_row.append(fmt % stats.gsl_stats_wsd())
                    s_row.append(fmt % stats.mean())
                    s_row.append(fmt % stats.gsl_stats_wsd())
            wm_row.append("")
            ws_row.append("")
            summary_table_data.append(s_row)

            table_data = [table_header, table_header2]
            table_d = {d: row for d, row in zip(bc_dists, rows)}
            table_data.extend([table_d[key] for key in sorted(table_d)])
            table_data.append(wm_row)
            table_data.append(ws_row)

            from libtbx import table_utils
            print "Hierarchy level %d Detector shifts" % level
            print table_utils.format(table_data,
                                     has_header=2,
                                     justify='center',
                                     delim=" ")

        print "Detector shifts summary"
        print table_utils.format(summary_table_data,
                                 has_header=3,
                                 justify='center',
                                 delim=" ")

        print
        print """
Beispiel #43
0
    def run(self):
        '''Execute the script.'''
        from dxtbx.datablock import DataBlockTemplateImporter
        from dials.util.options import flatten_datablocks
        from dials.util import log
        from logging import info
        from time import time
        from libtbx.utils import Abort

        # Parse the command line
        params, options = self.parser.parse_args(show_diff_phil=False)
        datablocks = flatten_datablocks(params.input.datablock)

        # Check we have some filenames
        if len(datablocks) == 0:

            # Check if a template has been set and print help if not, otherwise try to
            # import the images based on the template input
            if len(params.input.template) == 0:
                self.parser.print_help()
                exit(0)
            else:
                importer = DataBlockTemplateImporter(params.input.template,
                                                     options.verbose)
                datablocks = importer.datablocks

        # Save the options
        self.options = options
        self.params = params
        self.load_reference_geometry()

        st = time()

        # Import stuff
        if len(datablocks) == 0:
            raise Abort('No datablocks specified')
        elif len(datablocks) > 1:
            raise Abort('Only 1 datablock can be processed at a time.')
        datablock = datablocks[0]

        if self.reference_detector is not None:
            for imageset in datablock.extract_imagesets():
                imageset.set_detector(self.reference_detector)

        # Configure logging
        log.config(params.verbosity,
                   info='dials.process.log',
                   debug='dials.process.debug.log')

        # Log the diff phil
        diff_phil = self.parser.diff_phil.as_str()
        if diff_phil != '':
            info('The following parameters have been modified:\n')
            info(diff_phil)

        if self.params.output.datablock_filename:
            from dxtbx.datablock import DataBlockDumper
            dump = DataBlockDumper(datablock)
            dump.as_json(self.params.output.datablock_filename)

        # Do the processing
        observed = self.find_spots(datablock)
        experiments, indexed = self.index(datablock, observed)
        experiments, indexed = self.refine(experiments, indexed)
        integrated = self.integrate(experiments, indexed)

        # Total Time
        info("")
        info("Total Time Taken = %f seconds" % (time() - st))
    def run(self):
        ''' Mask the shoebox pixels and model the image background. '''
        from dials.util.options import flatten_reflections
        from dials.util.options import flatten_experiments
        from dials.util.options import flatten_datablocks
        from dials.util import log
        from dials.array_family import flex
        from libtbx.utils import Sorry

        # Parse the command line
        params, options = self.parser.parse_args(show_diff_phil=False)

        # Configure logging
        log.config()

        # Log the diff phil
        diff_phil = self.parser.diff_phil.as_str()
        if diff_phil != '':
            logger.info('The following parameters have been modified:\n')
            logger.info(diff_phil)

        # Get the data
        reflections = flatten_reflections(params.input.reflections)
        experiments = flatten_experiments(params.input.experiments)
        datablocks = flatten_datablocks(params.input.datablock)
        if not any([experiments, datablocks, reflections]):
            self.parser.print_help()
            exit(0)
        elif experiments and datablocks:
            raise Sorry('Both experiment list and datablocks set')
        elif len(experiments) > 1:
            raise Sorry('More than 1 experiment set')
        elif len(datablocks) > 1:
            raise Sorry('More than 1 datablock set')
        elif len(experiments) == 1:
            imageset = experiments[0].imageset
        elif len(datablocks) == 1:
            imagesets = datablocks[0].extract_imagesets()
            if len(imagesets) != 1:
                raise Sorry('Need 1 imageset, got %d' % len(imagesets))
            imageset = imagesets[0]
        if len(reflections) != 1:
            raise Sorry('Need 1 reflection table, got %d' % len(reflections))
        else:
            reflections = reflections[0]

        # Check the reflections contain the necessary stuff
        assert ("bbox" in reflections)
        assert ("panel" in reflections)

        # Get some models
        detector = imageset.get_detector()
        scan = imageset.get_scan()
        frame0, frame1 = scan.get_array_range()

        # Add some padding but limit to image volume
        if params.padding > 0:
            logger.info('Adding %d pixels as padding' % params.padding)
            x0, x1, y0, y1, z0, z1 = reflections['bbox'].parts()
            x0 -= params.padding
            x1 += params.padding
            y0 -= params.padding
            y1 += params.padding
            panel = reflections['panel']
            for i in range(len(reflections)):
                width, height = detector[panel[i]].get_image_size()
                if x0[i] < 0: x0[i] = 0
                if x1[i] > width: x1[i] = width
                if y0[i] < 0: y0[i] = 0
                if y1[i] > height: y1[i] = height
            reflections['bbox'] = flex.int6(x0, x1, y0, y1, z0, z1)

        # Now iterate through the images, masking the shoebox pixels as necessary,
        # then model the background & compute intensity statistics

        x0, x1, y0, y1, z0, z1 = reflections['bbox'].parts()

        for frame in range(frame0, frame1):
            data = imageset.get_raw_data(frame)[0]
            mask = (data < 0)
            data = data.as_double()
            sel = (z0 <= frame) & (z1 >= frame)
            _x0 = x0.select(sel)
            _x1 = x1.select(sel)
            _y0 = y0.select(sel)
            _y1 = y1.select(sel)
            for __x0, __x1, __y0, __y1 in zip(_x0, _x1, _y0, _y1):
                for y in range(__y0, __y1):
                    for x in range(__x0, __x1):
                        data[y, x] = 0.0
                        mask[y, x] = True
            imask = (~mask).as_1d().as_int()
            imask.reshape(data.accessor())

            from dials.algorithms.image.filter import summed_area

            # smooth over scale corresponding to max shoebox size
            d = max(flex.max(_x1 - _x0), flex.max(_y1 - _y0))

            summed_background = summed_area(data, (d, d))
            summed_mask = summed_area(imask, (d, d))
            mean_background = (summed_background / summed_mask.as_double())
            data.as_1d().set_selected(mask.as_1d(), mean_background.as_1d())
            print flex.sum(data.select(data.as_1d() > 0))
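summed_area above is an integral-image (summed-area table) box filter: dividing a window sum of the masked image by a window sum of the 0/1 mask gives the local mean background over unmasked pixels. A dependency-free sketch of the underlying table (toy 3x3 grid; not the dials implementation):

def summed_area_table(img, h, w):
    # S[y][x] holds the sum of img over the half-open block [0, y) x [0, x)
    S = [[0] * (w + 1) for _ in range(h + 1)]
    for y in range(h):
        for x in range(w):
            S[y + 1][x + 1] = img[y][x] + S[y][x + 1] + S[y + 1][x] - S[y][x]
    return S

def box_sum(S, y0, x0, y1, x1):
    # sum of img over [y0, y1) x [x0, x1): four lookups regardless of window size
    return S[y1][x1] - S[y0][x1] - S[y1][x0] + S[y0][x0]

img = [[1, 2, 3],
       [4, 5, 6],
       [7, 8, 9]]
S = summed_area_table(img, 3, 3)
print(box_sum(S, 0, 0, 2, 2))  # 1 + 2 + 4 + 5 = 12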
def run(args):

    from dials.util.options import OptionParser
    from dials.util.options import flatten_datablocks
    from dials.util.options import flatten_experiments
    from dials.util.options import flatten_reflections
    import libtbx.load_env

    usage = "%s [options] datablock.json reflections.pickle" % (
        libtbx.env.dispatcher_name)

    parser = OptionParser(usage=usage,
                          phil=phil_scope,
                          read_datablocks=True,
                          read_experiments=True,
                          read_reflections=True,
                          check_format=False,
                          epilog=help_message)

    params, options = parser.parse_args(show_diff_phil=True)
    datablocks = flatten_datablocks(params.input.datablock)
    experiments = flatten_experiments(params.input.experiments)
    reflections = flatten_reflections(params.input.reflections)

    if (len(datablocks) == 0
            and len(experiments) == 0) or len(reflections) == 0:
        parser.print_help()
        exit(0)

    if len(datablocks) == 0 and len(experiments) > 0:
        imagesets = experiments.imagesets()
        crystals = experiments.crystals()
    else:
        imagesets = []
        crystals = None
        for datablock in datablocks:
            imagesets.extend(datablock.extract_imagesets())

    if len(reflections) > 1:
        assert len(reflections) == len(imagesets)
        for i in range(len(reflections)):
            reflections[i]['imageset_id'] = flex.int(len(reflections[i]), i)
            if i > 0:
                reflections[0].extend(reflections[i])
    elif 'imageset_id' not in reflections[0]:
        reflections[0]['imageset_id'] = reflections[0]['id']
        reflections[0]['id'] = flex.int(reflections[0].size(), -1)

    reflections = reflections[0]

    import wxtbx.app
    a = wxtbx.app.CCTBXApp(0)
    a.settings = params
    f = ReciprocalLatticeViewer(None,
                                -1,
                                "Reflection data viewer",
                                size=(1024, 768))
    f.load_models(imagesets, reflections, crystals)
    f.Show()
    a.SetTopWindow(f)
    #a.Bind(wx.EVT_WINDOW_DESTROY, lambda evt: tb_icon.Destroy(), f)
    a.MainLoop()
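Before the viewer loads anything, the script above collapses several reflection tables into one, tagging every row with the imageset it came from. The tagging-and-extend pattern in isolation (toy tables; assumes dials is importable and the column values are dummies):

from dials.array_family import flex

tables = [flex.reflection_table(), flex.reflection_table()]
for i, t in enumerate(tables):
    t['id'] = flex.int(5, 0)                # dummy column giving each table 5 rows
    t['imageset_id'] = flex.int(len(t), i)  # tag every row with its source imageset
    if i > 0:
        tables[0].extend(t)

merged = tables[0]
print(len(merged))  # 10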
Beispiel #46
0
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_experiments
  from dials.util.options import flatten_datablocks
  from dials.util.options import flatten_reflections
  import libtbx.load_env

  usage = "%s [options] datablock.json | experiments.json | image_*.cbf" %(
    libtbx.env.dispatcher_name)

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_experiments=True,
    read_datablocks=True,
    read_datablocks_from_images=True,
    read_reflections=True,
    check_format=False,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)
  experiments = flatten_experiments(params.input.experiments)
  datablocks = flatten_datablocks(params.input.datablock)
  reflections = flatten_reflections(params.input.reflections)

  if len(datablocks) == 0 and len(experiments) == 0 and len(reflections) == 0:
    parser.print_help()
    exit()

  for i_expt, expt in enumerate(experiments):
    print "Experiment %i:" %i_expt
    print str(expt.detector)
    print 'Max resolution (at corners): %f' % (
      expt.detector.get_max_resolution(expt.beam.get_s0()))
    print 'Max resolution (inscribed):  %f' % (
      expt.detector.get_max_inscribed_resolution(expt.beam.get_s0()))
    print ''
    panel_id, (x, y) = beam_centre(expt.detector, expt.beam)
    if panel_id >= 0 and x is not None and y is not None:
      if len(expt.detector) > 1:
        beam_centre_str = "Beam centre: panel %i, (%.2f,%.2f)" %(panel_id, x, y)
      else:
        beam_centre_str = "Beam centre: (%.2f,%.2f)" %(x, y)
    else:
      beam_centre_str = ""
    print str(expt.beam) + beam_centre_str + '\n'
    if expt.scan is not None:
      print expt.scan
    if expt.goniometer is not None:
      print expt.goniometer
    expt.crystal.show(show_scan_varying=params.show_scan_varying)
    if expt.crystal.num_scan_points:
      from scitbx.array_family import flex
      from cctbx import uctbx
      abc = flex.vec3_double()
      angles = flex.vec3_double()
      for n in range(expt.crystal.num_scan_points):
        a, b, c, alpha, beta, gamma = expt.crystal.get_unit_cell_at_scan_point(n).parameters()
        abc.append((a, b, c))
        angles.append((alpha, beta, gamma))
      a, b, c = abc.mean()
      alpha, beta, gamma = angles.mean()
      mean_unit_cell = uctbx.unit_cell((a, b, c, alpha, beta, gamma))
      print "  Average unit cell: %s" %mean_unit_cell
    print

  for datablock in datablocks:
    if datablock.format_class() is not None:
      print 'Format: %s' %datablock.format_class()
    imagesets = datablock.extract_imagesets()
    for imageset in imagesets:
      try: print imageset.get_template()
      except Exception: pass
      detector = imageset.get_detector()
      print str(detector) + 'Max resolution: %f\n' %(
        detector.get_max_resolution(imageset.get_beam().get_s0()))
      if params.show_panel_distance:
        for ipanel, panel in enumerate(detector):
          from scitbx import matrix
          fast = matrix.col(panel.get_fast_axis())
          slow = matrix.col(panel.get_slow_axis())
          normal = fast.cross(slow)
          origin = matrix.col(panel.get_origin())
          distance = origin.dot(normal)
          fast_origin = - (origin - distance * normal).dot(fast)
          slow_origin = - (origin - distance * normal).dot(slow)
          print 'Panel %d: distance %.2f origin %.2f %.2f' % \
            (ipanel, distance, fast_origin, slow_origin)
        print ''
      panel_id, (x, y) = beam_centre(detector, imageset.get_beam())
      if panel_id >= 0 and x is not None and y is not None:
        if len(detector) > 1:
          beam_centre_str = "Beam centre: panel %i, (%.2f,%.2f)" %(panel_id, x, y)
        else:
          beam_centre_str = "Beam centre: (%.2f,%.2f)" %(x, y)
      else:
        beam_centre_str = ""
      print str(imageset.get_beam()) + beam_centre_str + '\n'
      if imageset.get_scan() is not None:
        print imageset.get_scan()
      if imageset.get_goniometer() is not None:
        print imageset.get_goniometer()

  from libtbx.containers import OrderedDict, OrderedSet
  formats = OrderedDict([
    ('miller_index', '%i, %i, %i'),
    ('d','%.2f'),
    ('dqe','%.3f'),
    ('id','%i'),
    ('imageset_id','%i'),
    ('panel','%i'),
    ('flags', '%i'),
    ('background.mean', '%.1f'),
    ('background.dispersion','%.1f'),
    ('background.mse', '%.1f'),
    ('background.sum.value', '%.1f'),
    ('background.sum.variance', '%.1f'),
    ('intensity.prf.value','%.1f'),
    ('intensity.prf.variance','%.1f'),
    ('intensity.sum.value','%.1f'),
    ('intensity.sum.variance','%.1f'),
    ('intensity.cor.value','%.1f'),
    ('intensity.cor.variance','%.1f'),
    ('lp','%.3f'),
    ('num_pixels.background','%i'),
    ('num_pixels.background_used','%i'),
    ('num_pixels.foreground','%i'),
    ('num_pixels.valid','%i'),
    ('partial_id','%i'),
    ('partiality','%.4f'),
    ('profile.correlation','%.3f'),
    ('profile.rmsd','%.3f'),
    ('xyzcal.mm','%.2f, %.2f, %.2f'),
    ('xyzcal.px','%.2f, %.2f, %.2f'),
    ('delpsical.rad','%.3f'),
    ('delpsical2','%.3f'),
    ('xyzobs.mm.value','%.2f, %.2f, %.2f'),
    ('xyzobs.mm.variance','%.4e, %.4e, %.4e'),
    ('xyzobs.px.value','%.2f, %.2f, %.2f'),
    ('xyzobs.px.variance','%.4f, %.4f, %.4f'),
    ('s1','%.4f, %.4f, %.4f'),
    ('rlp','%.4f, %.4f, %.4f'),
    ('zeta','%.3f'),
    ('x_resid','%.3f'),
    ('x_resid2','%.3f'),
    ('y_resid','%.3f'),
    ('y_resid2','%.3f'),
    ])

  for rlist in reflections:
    from cctbx.array_family import flex
    print
    print "Reflection list contains %i reflections" %(len(rlist))
    rows = [["Column", "min", "max", "mean"]]
    for k, col in rlist.cols():
      if type(col) in (flex.double, flex.int, flex.size_t):
        if type(col) in (flex.int, flex.size_t):
          col = col.as_double()
        rows.append([k, formats[k] %flex.min(col), formats[k] %flex.max(col),
                     formats[k]%flex.mean(col)])
      elif type(col) in (flex.vec3_double, flex.miller_index):
        if type(col) == flex.miller_index:
          col = col.as_vec3_double()
        rows.append([k, formats[k] %col.min(), formats[k] %col.max(),
                     formats[k]%col.mean()])

    from libtbx import table_utils
    print table_utils.format(rows, has_header=True, prefix="| ", postfix=" |")

  intensity_keys = (
    'miller_index', 'd', 'intensity.prf.value', 'intensity.prf.variance',
    'intensity.sum.value', 'intensity.sum.variance', 'background.mean',
    'profile.correlation', 'profile.rmsd'
  )

  profile_fit_keys = ('miller_index', 'd',)

  centroid_keys = (
    'miller_index', 'd', 'xyzcal.mm', 'xyzcal.px', 'xyzobs.mm.value',
    'xyzobs.mm.variance', 'xyzobs.px.value', 'xyzobs.px.variance'
  )

  keys_to_print = OrderedSet()

  if params.show_intensities:
    for k in intensity_keys: keys_to_print.add(k)
  if params.show_profile_fit:
    for k in profile_fit_keys: keys_to_print.add(k)
  if params.show_centroids:
    for k in centroid_keys: keys_to_print.add(k)
  if params.show_all_reflection_data:
    for k in formats: keys_to_print.add(k)

  def format_column(key, data, format_strings=None):
    if isinstance(data, flex.vec3_double):
      c_strings = [c.as_string(format_strings[i].strip()) for i, c in enumerate(data.parts())]
    elif isinstance(data, flex.miller_index):
      c_strings = [c.as_string(format_strings[i].strip()) for i, c in enumerate(data.as_vec3_double().parts())]
    elif isinstance(data, flex.size_t):
      c_strings = [data.as_int().as_string(format_strings[0].strip())]
    else:
      c_strings = [data.as_string(format_strings[0].strip())]

    column = flex.std_string()
    max_element_lengths = [c.max_element_length() for c in c_strings]
    for i in range(len(c_strings[0])):

      column.append(('%%%is' %len(key)) %', '.join(
        ('%%%is' %max_element_lengths[j]) %c_strings[j][i]
        for j in range(len(c_strings))))
    return column


  if keys_to_print:
    keys = [k for k in keys_to_print if k in rlist]
    rows = [keys]
    max_reflections = len(rlist)
    if params.max_reflections is not None:
      max_reflections = min(len(rlist), params.max_reflections)

    columns = []

    for k in keys:
      columns.append(format_column(k, rlist[k], format_strings=formats[k].split(',')))

    print
    print "Printing %i of %i reflections:" %(max_reflections, len(rlist))
    for j in range(len(columns)):
      key = keys[j]
      width = max(len(key), columns[j].max_element_length())
      print ("%%%is" %width) %key,
    print
    for i in range(max_reflections):
      for j in range(len(columns)):
        print columns[j][i],
      print

  return
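format_column above pads every cell of a column to a common width so the final loop can print aligned rows. The same right-justification arithmetic in plain Python (column data invented):

values = [1.25, 10.5, 100.0]
key = 'lp'
cells = ['%.3f' % v for v in values]
width = max(len(key), max(len(c) for c in cells))
print(key.rjust(width))
for c in cells:
    print(c.rjust(width))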
Beispiel #47
0
    def run(self):
        '''Execute the script.'''
        from dials.array_family import flex
        from dials.util.options import flatten_datablocks
        from time import time
        from dials.util import log
        from libtbx.utils import Sorry
        start_time = time()

        # Parse the command line
        params, options = self.parser.parse_args(show_diff_phil=False)

        # Configure the logging
        log.config(params.verbosity,
                   info=params.output.log,
                   debug=params.output.debug_log)

        from dials.util.version import dials_version
        logger.info(dials_version())

        # Log the diff phil
        diff_phil = self.parser.diff_phil.as_str()
        if diff_phil != '':
            logger.info('The following parameters have been modified:\n')
            logger.info(diff_phil)

        # Ensure we have a data block
        datablocks = flatten_datablocks(params.input.datablock)
        if len(datablocks) == 0:
            self.parser.print_help()
            return
        elif len(datablocks) != 1:
            raise Sorry('only 1 datablock can be processed at a time')

        # Loop through all the imagesets and find the strong spots
        reflections = flex.reflection_table.from_observations(
            datablocks[0], params)

        # Delete the shoeboxes
        if not params.output.shoeboxes:
            del reflections['shoebox']

        # ascii spot count per image plot
        from dials.util.ascii_art import spot_counts_per_image_plot

        for i, imageset in enumerate(datablocks[0].extract_imagesets()):
            ascii_plot = spot_counts_per_image_plot(
                reflections.select(reflections['id'] == i))
            if len(ascii_plot):
                logger.info(
                    '\nHistogram of per-image spot count for imageset %i:' % i)
                logger.info(ascii_plot)

        # Save the reflections to file
        logger.info('\n' + '-' * 80)
        reflections.as_pickle(params.output.reflections)
        logger.info('Saved {0} reflections to {1}'.format(
            len(reflections), params.output.reflections))

        # Save the datablock
        if params.output.datablock:
            from dxtbx.datablock import DataBlockDumper
            logger.info('Saving datablocks to {0}'.format(
                params.output.datablock))
            dump = DataBlockDumper(datablocks)
            dump.as_file(params.output.datablock)

        # Print some per image statistics
        if params.per_image_statistics:
            from dials.algorithms.spot_finding import per_image_analysis
            from cStringIO import StringIO
            s = StringIO()
            for i, imageset in enumerate(datablocks[0].extract_imagesets()):
                print >> s, "Number of centroids per image for imageset %i:" % i
                stats = per_image_analysis.stats_imageset(
                    imageset,
                    reflections.select(reflections['id'] == i),
                    resolution_analysis=False)
                per_image_analysis.print_table(stats, out=s)
            logger.info(s.getvalue())

        # Print the time
        logger.info("Time Taken: %f" % (time() - start_time))
Beispiel #48
0
def run(args):

    from dials.util.options import OptionParser
    from dials.util.options import flatten_datablocks
    from dials.util.options import flatten_experiments
    from dials.util.options import flatten_reflections
    from dials.util import log
    import libtbx.load_env

    usage = "%s [options] datablock.json reflections.pickle" % (
        libtbx.env.dispatcher_name)

    parser = OptionParser(usage=usage,
                          phil=phil_scope,
                          read_datablocks=True,
                          read_experiments=True,
                          read_reflections=True,
                          check_format=False,
                          epilog=help_message)

    params, options = parser.parse_args()
    datablocks = flatten_datablocks(params.input.datablock)
    experiments = flatten_experiments(params.input.experiments)
    reflections = flatten_reflections(params.input.reflections)

    if (len(datablocks) == 0
            and len(experiments) == 0) or len(reflections) == 0:
        parser.print_help()
        exit(0)

    # Configure the logging
    log.config(info='dials.rl_png.log')

    # Log the diff phil
    diff_phil = parser.diff_phil.as_str()
    if diff_phil != '':
        logger.info('The following parameters have been modified:\n')
        logger.info(diff_phil)

    reflections = reflections[0]

    if len(datablocks) == 0 and len(experiments) > 0:
        imagesets = experiments.imagesets()
    else:
        imagesets = []
        for datablock in datablocks:
            imagesets.extend(datablock.extract_imagesets())

    f = ReciprocalLatticePng(settings=params)
    f.load_models(imagesets, reflections)

    imageset = imagesets[0]
    rotation_axis = matrix.col(imageset.get_goniometer().get_rotation_axis())
    s0 = matrix.col(imageset.get_beam().get_s0())

    e1 = rotation_axis.normalize()
    e2 = s0.normalize()
    e3 = e1.cross(e2).normalize()
    #print e1
    #print e2
    #print e3

    f.viewer.plot('rl_rotation_axis.png', n=e1.elems)
    f.viewer.plot('rl_beam_vector.png', n=e2.elems)
    f.viewer.plot('rl_e3.png', n=e3.elems)

    n_solutions = params.basis_vector_search.n_solutions

    if len(experiments):
        for i, c in enumerate(experiments.crystals()):
            A = c.get_A()
            astar = A[:3]
            bstar = A[3:6]
            cstar = A[6:9]

            direct_matrix = A.inverse()
            a = direct_matrix[:3]
            b = direct_matrix[3:6]
            c = direct_matrix[6:9]

            prefix = ''
            if len(experiments.crystals()) > 1:
                prefix = '%i_' % (i + 1)

            f.viewer.plot('rl_%sa.png' % prefix, n=a)
            f.viewer.plot('rl_%sb.png' % prefix, n=b)
            f.viewer.plot('rl_%sc.png' % prefix, n=c)

    elif n_solutions:
        from dials.command_line.discover_better_experimental_model \
             import run_dps, dps_phil_scope

        hardcoded_phil = dps_phil_scope.extract()
        hardcoded_phil.d_min = params.d_min
        result = run_dps((imagesets[0], reflections, hardcoded_phil))
        solutions = [matrix.col(v) for v in result['solutions']]
        for i in range(min(n_solutions, len(solutions))):
            v = solutions[i]
            #if i > 0:
            #for v1 in solutions[:i-1]:
            #angle = v.angle(v1, deg=True)
            #print angle
            f.viewer.plot('rl_solution_%s.png' % (i + 1), n=v.elems)
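The crystal branch above slices the reciprocal basis row-wise out of A and recovers the direct-space cell vectors from its inverse. A self-contained check with scitbx.matrix, mirroring the snippet's slicing convention, for a 10 Angstrom cubic cell where the row/column distinction washes out (numbers invented):

from scitbx import matrix

A = matrix.sqr((0.1, 0.0, 0.0,
                0.0, 0.1, 0.0,
                0.0, 0.0, 0.1))  # a*, b*, c* as rows, in 1/Angstrom
astar, bstar, cstar = A[:3], A[3:6], A[6:9]

direct = A.inverse()
a, b, c = direct[:3], direct[3:6], direct[6:9]
print(a)  # ~(10, 0, 0): the direct-space a vector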
Beispiel #49
0
  # Configure the logging
  log.config(
    info=params.output.log,
    debug=params.output.debug_log)

  # Print the version number
  logger.info(dials_version())

  # Log the diff phil
  diff_phil = parser.diff_phil.as_str()
  if diff_phil != '':
    logger.info('The following parameters have been modified:\n')
    logger.info(diff_phil)

  # Get the experiments and reflections
  datablocks = flatten_datablocks(params.input.datablock)

  experiments = flatten_experiments(params.input.experiments)
  reflections = flatten_reflections(params.input.reflections)
  if len(reflections) == 0 and len(experiments) == 0 and len(datablocks) == 0:
    parser.print_help()
    exit(0)

  # Choose the exporter
  if params.format == 'mtz':
    exporter = MTZExporter(params, experiments, reflections)
  elif params.format == 'sadabs':
    exporter = SadabsExporter(params, experiments, reflections)
  elif params.format == 'xds_ascii':
    exporter = XDSASCIIExporter(params, experiments, reflections)
  elif params.format == 'nxs':
Beispiel #50
0
def run(args):

    from dials.util.options import OptionParser
    from dials.util.options import flatten_datablocks
    from dials.util.options import flatten_experiments
    from dials.util.options import flatten_reflections

    parser = OptionParser(
        phil=master_phil,
        read_datablocks=True,
        read_experiments=True,
        read_reflections=True,
        check_format=False,
    )

    params, options = parser.parse_args(show_diff_phil=True)
    datablocks = flatten_datablocks(params.input.datablock)
    experiments = flatten_experiments(params.input.experiments)
    reflections = flatten_reflections(params.input.reflections)[0]
    if len(params.input.reflections) == 2:
        reflections2 = flatten_reflections(params.input.reflections)[1]
    else:
        reflections2 = None

    # find the reflections in the second set that DO NOT match those in the
    # first set (skip when no second reflection table was supplied)
    if reflections2 is not None:
        mask, _ = reflections2.match_with_reference(reflections)
        reflections2 = reflections2.select(~mask)
        print("{0} reflections from the second set do not match the first".format(
            len(reflections2)))
    # reflections2 = reflections2.select(reflections2["miller_index"] == (-7,2,-25))

    if len(datablocks) == 0:
        if len(experiments) > 0:
            imagesets = experiments.imagesets()
        else:
            parser.print_help()
            return
    elif len(datablocks) > 1:
        raise Sorry("Only one DataBlock can be processed at a time")
    else:
        imagesets = datablocks[0].extract_imagesets()

    if len(imagesets) > 1:
        raise Sorry("Only one ImageSet can be processed at a time")
    imageset = imagesets[0]

    import wxtbx.app

    a = wxtbx.app.CCTBXApp(0)
    a.settings = params
    f = PredRelpViewer(None,
                       -1,
                       "Prediction reciprocal lattice viewer",
                       size=(1024, 768))
    f.load_reflections2(reflections2)
    f.load_models(imageset, reflections)
    f.Show()
    a.SetTopWindow(f)
    # a.Bind(wx.EVT_WINDOW_DESTROY, lambda evt: tb_icon.Destroy(), f)
    a.MainLoop()
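The anti-join above relies on flex.bool supporting bitwise negation: match_with_reference returns a mask of matched rows, and select(~mask) keeps the unmatched remainder. The negate-and-select idiom on its own (toy table; assumes dials is importable):

from dials.array_family import flex

t = flex.reflection_table()
t['x'] = flex.double([1.0, 2.0, 3.0])
matched = flex.bool([True, False, True])   # stand-in for a match_with_reference mask
unmatched = t.select(~matched)             # keep only the rows that did not match
print(len(unmatched))  # 1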
Beispiel #51
0
  def run(self):
    ''' Extract the shoeboxes. '''
    from dials.util.options import flatten_reflections
    from dials.util.options import flatten_experiments
    from dials.util.options import flatten_datablocks
    from dials.util import log
    from dials.array_family import flex
    from libtbx.utils import Sorry

    # Parse the command line
    params, options = self.parser.parse_args(show_diff_phil=False)

    # Configure logging
    log.config()

    # Log the diff phil
    diff_phil = self.parser.diff_phil.as_str()
    if diff_phil != '':
      logger.info('The following parameters have been modified:\n')
      logger.info(diff_phil)

    # Get the data
    reflections = flatten_reflections(params.input.reflections)
    experiments = flatten_experiments(params.input.experiments)
    datablocks = flatten_datablocks(params.input.datablock)
    if not any([experiments, datablocks, reflections]):
      self.parser.print_help()
      exit(0)
    elif experiments and datablocks:
      raise Sorry('Both experiment list and datablocks set')
    elif len(experiments) > 1:
      raise Sorry('More than 1 experiment set')
    elif len(datablocks) > 1:
      raise Sorry('More than 1 datablock set')
    elif len(experiments) == 1:
      imageset = experiments[0].imageset
    elif len(datablocks) == 1:
      imagesets = datablocks[0].extract_imagesets()
      if len(imagesets) != 1:
        raise Sorry('Need 1 imageset, got %d' % len(imagesets))
      imageset = imagesets[0]
    if len(reflections) != 1:
      raise Sorry('Need 1 reflection table, got %d' % len(reflections))
    else:
      reflections = reflections[0]

    # Check the reflections contain the necessary stuff
    assert("bbox" in reflections)
    assert("panel" in reflections)

    # Get some models
    detector = imageset.get_detector()
    scan = imageset.get_scan()
    frame0, frame1 = scan.get_array_range()

    # Add some padding but limit to image volume
    if params.padding > 0:
      logger.info('Adding %d pixels as padding' % params.padding)
      x0, x1, y0, y1, z0, z1 = reflections['bbox'].parts()
      x0 -= params.padding
      x1 += params.padding
      y0 -= params.padding
      y1 += params.padding
      # z0 -= params.padding
      # z1 += params.padding
      panel = reflections['panel']
      for i in range(len(reflections)):
        width, height = detector[panel[i]].get_image_size()
        if x0[i] < 0: x0[i] = 0
        if x1[i] > width: x1[i] = width
        if y0[i] < 0: y0[i] = 0
        if y1[i] > height: y1[i] = height
        if z0[i] < frame0: z0[i] = frame0
        if z1[i] > frame1: z1[i] = frame1
      reflections['bbox'] = flex.int6(x0, x1, y0, y1, z0, z1)

    # Save the old shoeboxes
    if "shoebox" in reflections:
      old_shoebox = reflections['shoebox']
    else:
      old_shoebox = None

    # Allocate the shoeboxes
    reflections["shoebox"] = flex.shoebox(
      reflections["panel"],
      reflections["bbox"],
      allocate=True)

    # Extract the shoeboxes
    reflections.extract_shoeboxes(imageset, verbose=True)

    # Preserve masking
    if old_shoebox is not None:
      from dials.algorithms.shoebox import MaskCode
      logger.info("Applying old shoebox mask")
      new_shoebox = reflections['shoebox']
      for i in range(len(reflections)):
        bbox0 = old_shoebox[i].bbox
        bbox1 = new_shoebox[i].bbox
        mask0 = old_shoebox[i].mask
        mask1 = new_shoebox[i].mask
        mask2 = flex.int(mask1.accessor(), 0)
        x0 = bbox0[0] - bbox1[0]
        x1 = bbox0[1] - bbox0[0] + x0
        y0 = bbox0[2] - bbox1[2]
        y1 = bbox0[3] - bbox0[2] + y0
        z0 = bbox0[4] - bbox1[4]
        z1 = bbox0[5] - bbox0[4] + z0
        mask2[z0:z1,y0:y1,x0:x1] = mask0
        mask1 = mask1.as_1d() | mask2.as_1d()
        if params.padding_is_background:
          selection = flex.size_t(range(len(mask1))).select(mask1 == MaskCode.Valid)
          values = flex.int(len(selection), MaskCode.Valid | MaskCode.Background)
          mask1.set_selected(selection, values)
        mask1.reshape(new_shoebox[i].mask.accessor())
        new_shoebox[i].mask = mask1

    # Saving the reflections to disk
    filename = params.output.reflections
    logger.info('Saving %d reflections to %s' % (len(reflections), filename))
    reflections.as_pickle(filename)
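The mask-preservation loop above re-bases the old bbox into the padded shoebox's local frame before copying the old mask across. The offset arithmetic on its own, reduced to one axis (plain lists, no dials types):

old_bbox = (2, 5)   # x0, x1 of the original shoebox
new_bbox = (0, 7)   # padded shoebox that contains the original
old = [1, 1, 1]     # old mask values over [2, 5)

x0 = old_bbox[0] - new_bbox[0]       # where the old block starts in the new frame
x1 = old_bbox[1] - old_bbox[0] + x0  # old width re-based onto the new frame

new = [0] * (new_bbox[1] - new_bbox[0])
new[x0:x1] = old
print(new)  # [0, 0, 1, 1, 1, 0, 0]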
Beispiel #52
0
  def run(self):
    '''Execute the script.'''
    from dials.array_family import flex
    from dials.util.options import flatten_datablocks
    from time import time
    from dials.util import log
    from libtbx.utils import Sorry
    start_time = time()

    # Parse the command line
    params, options = self.parser.parse_args(show_diff_phil=False)

    # Configure the logging
    log.config(
      params.verbosity,
      info=params.output.log,
      debug=params.output.debug_log)

    from dials.util.version import dials_version
    logger.info(dials_version())

    # Log the diff phil
    diff_phil = self.parser.diff_phil.as_str()
    if diff_phil != '':
      logger.info('The following parameters have been modified:\n')
      logger.info(diff_phil)

    # Ensure we have a data block
    datablocks = flatten_datablocks(params.input.datablock)
    if len(datablocks) == 0:
      self.parser.print_help()
      return
    elif len(datablocks) != 1:
      raise Sorry('only 1 datablock can be processed at a time')

    # Loop through all the imagesets and find the strong spots
    reflections = flex.reflection_table.from_observations(
      datablocks[0], params)

    # Delete the shoeboxes
    if not params.output.shoeboxes:
      del reflections['shoebox']

    # ascii spot count per image plot
    from dials.util.ascii_art import spot_counts_per_image_plot

    for i, imageset in enumerate(datablocks[0].extract_imagesets()):
      ascii_plot = spot_counts_per_image_plot(
        reflections.select(reflections['id'] == i))
      if len(ascii_plot):
        logger.info('\nHistogram of per-image spot count for imageset %i:' %i)
        logger.info(ascii_plot)

    # Save the reflections to file
    logger.info('\n' + '-' * 80)
    reflections.as_pickle(params.output.reflections)
    logger.info('Saved {0} reflections to {1}'.format(
        len(reflections), params.output.reflections))

    # Save the datablock
    if params.output.datablock:
      from dxtbx.datablock import DataBlockDumper
      logger.info('Saving datablocks to {0}'.format(
        params.output.datablock))
      dump = DataBlockDumper(datablocks)
      dump.as_file(params.output.datablock)

    # Print some per image statistics
    if params.per_image_statistics:
      from dials.algorithms.spot_finding import per_image_analysis
      from cStringIO import StringIO
      s = StringIO()
      for i, imageset in enumerate(datablocks[0].extract_imagesets()):
        print >> s, "Number of centroids per image for imageset %i:" %i
        stats = per_image_analysis.stats_imageset(
          imageset, reflections.select(reflections['id'] == i),
          resolution_analysis=False)
        per_image_analysis.print_table(stats, out=s)
      logger.info(s.getvalue())

    # Print the time
    logger.info("Time Taken: %f" % (time() - start_time))
Beispiel #53
0
def run(args):

  from dials.util.options import OptionParser
  from dials.util.options import flatten_experiments
  from dials.util.options import flatten_datablocks
  from dials.util.options import flatten_reflections
  import libtbx.load_env

  usage = "%s [options] datablock.json | experiments.json | image_*.cbf" %(
    libtbx.env.dispatcher_name)

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_experiments=True,
    read_datablocks=True,
    read_datablocks_from_images=True,
    read_reflections=True,
    check_format=False,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)
  experiments = flatten_experiments(params.input.experiments)
  datablocks = flatten_datablocks(params.input.datablock)
  reflections = flatten_reflections(params.input.reflections)

  if len(datablocks) == 0 and len(experiments) == 0 and len(reflections) == 0:
    parser.print_help()
    exit()

  for i_expt, expt in enumerate(experiments):
    print "Experiment %i:" %i_expt
    print str(expt.detector)
    print 'Max resolution (at corners): %f' % (
      expt.detector.get_max_resolution(expt.beam.get_s0()))
    print 'Max resolution (inscribed):  %f' % (
      expt.detector.get_max_inscribed_resolution(expt.beam.get_s0()))
    if params.show_panel_distance:
      for ipanel, panel in enumerate(expt.detector):
        from scitbx import matrix
        fast = matrix.col(panel.get_fast_axis())
        slow = matrix.col(panel.get_slow_axis())
        normal = fast.cross(slow)
        origin = matrix.col(panel.get_origin())
        distance = origin.dot(normal)
        fast_origin = - (origin - distance * normal).dot(fast)
        slow_origin = - (origin - distance * normal).dot(slow)
        print 'Panel %d: distance %.2f origin %.2f %.2f' % \
          (ipanel, distance, fast_origin, slow_origin)
      print ''
    print ''
    panel_id, (x, y) = beam_centre(expt.detector, expt.beam)
    if panel_id >= 0 and x is not None and y is not None:
      if len(expt.detector) > 1:
        beam_centre_str = "Beam centre: panel %i, (%.2f,%.2f)" %(panel_id, x, y)
      else:
        beam_centre_str = "Beam centre: (%.2f,%.2f)" %(x, y)
    else:
      beam_centre_str = ""
    print str(expt.beam) + beam_centre_str + '\n'
    if expt.scan is not None:
      print expt.scan
    if expt.goniometer is not None:
      print expt.goniometer
    expt.crystal.show(show_scan_varying=params.show_scan_varying)
    if expt.crystal.num_scan_points:
      from scitbx.array_family import flex
      from cctbx import uctbx
      abc = flex.vec3_double()
      angles = flex.vec3_double()
      for n in range(expt.crystal.num_scan_points):
        a, b, c, alpha, beta, gamma = expt.crystal.get_unit_cell_at_scan_point(n).parameters()
        abc.append((a, b, c))
        angles.append((alpha, beta, gamma))
      a, b, c = abc.mean()
      alpha, beta, gamma = angles.mean()
      mean_unit_cell = uctbx.unit_cell((a, b, c, alpha, beta, gamma))
      print "  Average unit cell: %s" %mean_unit_cell
    print

  for datablock in datablocks:
    if datablock.format_class() is not None:
      print 'Format: %s' %datablock.format_class()
    imagesets = datablock.extract_imagesets()
    for imageset in imagesets:
      try: print imageset.get_template()
      except Exception: pass
      detector = imageset.get_detector()
      print str(detector) + 'Max resolution: %f\n' %(
        detector.get_max_resolution(imageset.get_beam().get_s0()))
      if params.show_panel_distance:
        for ipanel, panel in enumerate(detector):
          from scitbx import matrix
          fast = matrix.col(panel.get_fast_axis())
          slow = matrix.col(panel.get_slow_axis())
          normal = fast.cross(slow)
          origin = matrix.col(panel.get_origin())
          distance = origin.dot(normal)
          fast_origin = - (origin - distance * normal).dot(fast)
          slow_origin = - (origin - distance * normal).dot(slow)
          print 'Panel %d: distance %.2f origin %.2f %.2f' % \
            (ipanel, distance, fast_origin, slow_origin)
        print ''
      panel_id, (x, y) = beam_centre(detector, imageset.get_beam())
      if panel_id >= 0 and x is not None and y is not None:
        if len(detector) > 1:
          beam_centre_str = "Beam centre: panel %i, (%.2f,%.2f)" %(panel_id, x, y)
        else:
          beam_centre_str = "Beam centre: (%.2f,%.2f)" %(x, y)
      else:
        beam_centre_str = ""
      print str(imageset.get_beam()) + beam_centre_str + '\n'
      if imageset.get_scan() is not None:
        print imageset.get_scan()
      if imageset.get_goniometer() is not None:
        print imageset.get_goniometer()

  from libtbx.containers import OrderedDict, OrderedSet
  formats = OrderedDict([
    ('miller_index', '%i, %i, %i'),
    ('d','%.2f'),
    ('dqe','%.3f'),
    ('id','%i'),
    ('imageset_id','%i'),
    ('panel','%i'),
    ('flags', '%i'),
    ('background.mean', '%.1f'),
    ('background.dispersion','%.1f'),
    ('background.mse', '%.1f'),
    ('background.sum.value', '%.1f'),
    ('background.sum.variance', '%.1f'),
    ('intensity.prf.value','%.1f'),
    ('intensity.prf.variance','%.1f'),
    ('intensity.sum.value','%.1f'),
    ('intensity.sum.variance','%.1f'),
    ('intensity.cor.value','%.1f'),
    ('intensity.cor.variance','%.1f'),
    ('lp','%.3f'),
    ('num_pixels.background','%i'),
    ('num_pixels.background_used','%i'),
    ('num_pixels.foreground','%i'),
    ('num_pixels.valid','%i'),
    ('partial_id','%i'),
    ('partiality','%.4f'),
    ('profile.correlation','%.3f'),
    ('profile.rmsd','%.3f'),
    ('xyzcal.mm','%.2f, %.2f, %.2f'),
    ('xyzcal.px','%.2f, %.2f, %.2f'),
    ('delpsical.rad','%.3f'),
    ('delpsical2','%.3f'),
    ('xyzobs.mm.value','%.2f, %.2f, %.2f'),
    ('xyzobs.mm.variance','%.4e, %.4e, %.4e'),
    ('xyzobs.px.value','%.2f, %.2f, %.2f'),
    ('xyzobs.px.variance','%.4f, %.4f, %.4f'),
    ('s1','%.4f, %.4f, %.4f'),
    ('rlp','%.4f, %.4f, %.4f'),
    ('zeta','%.3f'),
    ('x_resid','%.3f'),
    ('x_resid2','%.3f'),
    ('y_resid','%.3f'),
    ('y_resid2','%.3f'),
    ])

  for rlist in reflections:
    from cctbx.array_family import flex
    print
    print "Reflection list contains %i reflections" %(len(rlist))
    rows = [["Column", "min", "max", "mean"]]
    for k, col in rlist.cols():
      if type(col) in (flex.double, flex.int, flex.size_t):
        if type(col) in (flex.int, flex.size_t):
          col = col.as_double()
        rows.append([k, formats[k] %flex.min(col), formats[k] %flex.max(col),
                     formats[k]%flex.mean(col)])
      elif type(col) in (flex.vec3_double, flex.miller_index):
        if type(col) == flex.miller_index:
          col = col.as_vec3_double()
        rows.append([k, formats[k] %col.min(), formats[k] %col.max(),
                     formats[k]%col.mean()])

    from libtbx import table_utils
    print table_utils.format(rows, has_header=True, prefix="| ", postfix=" |")

  intensity_keys = (
    'miller_index', 'd', 'intensity.prf.value', 'intensity.prf.variance',
    'intensity.sum.value', 'intensity.sum.variance', 'background.mean',
    'profile.correlation', 'profile.rmsd'
  )

  profile_fit_keys = ('miller_index', 'd',)

  centroid_keys = (
    'miller_index', 'd', 'xyzcal.mm', 'xyzcal.px', 'xyzobs.mm.value',
    'xyzobs.mm.variance', 'xyzobs.px.value', 'xyzobs.px.variance'
  )

  keys_to_print = OrderedSet()

  if params.show_intensities:
    for k in intensity_keys: keys_to_print.add(k)
  if params.show_profile_fit:
    for k in profile_fit_keys: keys_to_print.add(k)
  if params.show_centroids:
    for k in centroid_keys: keys_to_print.add(k)
  if params.show_all_reflection_data:
    for k in formats: keys_to_print.add(k)

  def format_column(key, data, format_strings=None):
    if isinstance(data, flex.vec3_double):
      c_strings = [c.as_string(format_strings[i].strip()) for i, c in enumerate(data.parts())]
    elif isinstance(data, flex.miller_index):
      c_strings = [c.as_string(format_strings[i].strip()) for i, c in enumerate(data.as_vec3_double().parts())]
    elif isinstance(data, flex.size_t):
      c_strings = [data.as_int().as_string(format_strings[0].strip())]
    else:
      c_strings = [data.as_string(format_strings[0].strip())]

    column = flex.std_string()
    max_element_lengths = [c.max_element_length() for c in c_strings]
    for i in range(len(c_strings[0])):
      column.append(('%%%is' %len(key)) %', '.join(
        ('%%%is' %max_element_lengths[j]) %c_strings[j][i]
        for j in range(len(c_strings))))
    return column
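  # For example (hypothetical values): format_column('d', rlist['d'],
  # format_strings=['%.2f']) returns a flex.std_string column whose entries
  # are right-justified to at least the width of the key, ready for the
  # aligned printout below.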


  if keys_to_print:
    keys = [k for k in keys_to_print if k in rlist]
    rows = [keys]
    max_reflections = len(rlist)
    if params.max_reflections is not None:
      max_reflections = min(len(rlist), params.max_reflections)

    columns = []

    for k in keys:
      columns.append(format_column(k, rlist[k], format_strings=formats[k].split(',')))

    print
    print "Printing %i of %i reflections:" %(max_reflections, len(rlist))
    for j in range(len(columns)):
      key = keys[j]
      width = max(len(key), columns[j].max_element_length())
      print ("%%%is" %width) %key,
    print
    for i in range(max_reflections):
      for j in range(len(columns)):
        print columns[j][i],
      print

  return
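# A minimal, self-contained sketch of the min/max/mean column summary used
# above (toy values, assuming a DIALS installation):
#
#   from dials.array_family import flex
#   col = flex.double([1.0, 2.5, 4.0])
#   fmt = '%.2f'
#   print fmt % flex.min(col), fmt % flex.max(col), fmt % flex.mean(col)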
Beispiel #54
0
def run(args):
  import libtbx.load_env
  usage = """\
%s datablock.json reflections.pickle [options]""" %libtbx.env.dispatcher_name
  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  from dials.util.options import flatten_experiments
  from dials.util.options import flatten_reflections
  from scitbx.array_family import flex
  from scitbx import matrix
  from libtbx.utils import Sorry
  parser = OptionParser(
    usage=usage,
    phil=master_phil_scope,
    read_datablocks=True,
    read_experiments=True,
    read_reflections=True,
    check_format=False)

  params, options = parser.parse_args(show_diff_phil=True)
  datablocks = flatten_datablocks(params.input.datablock)
  reflections = flatten_reflections(params.input.reflections)
  experiments = flatten_experiments(params.input.experiments)
  if len(datablocks) == 1:
    imageset = datablocks[0].extract_imagesets()[0]
  elif len(datablocks) > 1:
    raise Sorry("Only one DataBlock can be processed at a time")
  elif len(experiments.imagesets()) > 0:
    imageset = experiments.imagesets()[0]
    imageset.set_detector(experiments[0].detector)
    imageset.set_beam(experiments[0].beam)
    imageset.set_goniometer(experiments[0].goniometer)
  else:
    parser.print_help()
    return

  detector = imageset.get_detector()
  scan = imageset.get_scan()

  panel_origin_shifts = {0: (0,0,0)}
  try:
    hierarchy = detector.hierarchy()
  except AttributeError:
    hierarchy = None
  for i_panel in range(1, len(detector)):
    origin_shift = matrix.col(detector[0].get_origin()) \
      - matrix.col(detector[i_panel].get_origin())
    panel_origin_shifts[i_panel] = origin_shift

  observed_xyz = flex.vec3_double()
  predicted_xyz = flex.vec3_double()

  for reflection_list in reflections:

    if len(params.scan_range):
      sel = flex.bool(len(reflection_list), False)

      xyzcal_px = None
      xyzobs_px = None

      if 'xyzcal.px' in reflection_list:
        xyzcal_px = reflection_list['xyzcal.px']
      if 'xyzobs.px.value' in reflection_list:
        xyzobs_px = reflection_list['xyzobs.px.value']

      if xyzcal_px is not None and not xyzcal_px.norms().all_eq(0):
        centroids_frame = xyzcal_px.parts()[2]
      elif xyzobs_px is not None and not xyzobs_px.norms().all_eq(0):
        centroids_frame = xyzobs_px.parts()[2]
      else:
        raise Sorry("No pixel coordinates given in input reflections.")

      for scan_range in params.scan_range:
        if scan_range is None: continue
        range_start, range_end = scan_range
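        # half-open interval: a centroid on frame range_end is excluded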
        sel |= ((centroids_frame >= range_start) & (centroids_frame < range_end))
      reflection_list = reflection_list.select(sel)
    if params.first_n_reflections is not None:
      centroid_positions = reflection_list.centroid_position()
      centroids_frame = centroid_positions.parts()[2]
      perm = flex.sort_permutation(centroids_frame)
      perm = perm[:min(reflection_list.size(), params.first_n_reflections)]
      reflection_list = reflection_list.select(perm)
    if params.crystal_id is not None:
      reflection_list = reflection_list.select(
        reflection_list['id'] == params.crystal_id)

    xyzcal_px = None
    xyzobs_px = None
    xyzobs_mm = None
    xyzcal_mm = None

    if 'xyzcal.px' in reflection_list:
      xyzcal_px = reflection_list['xyzcal.px']
    if 'xyzobs.px.value' in reflection_list:
      xyzobs_px = reflection_list['xyzobs.px.value']
    if 'xyzcal.mm' in reflection_list:
      xyzcal_mm = reflection_list['xyzcal.mm']
    if 'xyzobs.mm.value' in reflection_list:
      xyzobs_mm = reflection_list['xyzobs.mm.value']

    panel_ids = reflection_list['panel']
    if xyzobs_mm is None and xyzobs_px is not None:
      xyzobs_mm = flex.vec3_double()
      for i_panel in range(len(detector)):
        xyzobs_px_panel = xyzobs_px.select(panel_ids == i_panel)

        from dials.algorithms.centroid import centroid_px_to_mm_panel
        xyzobs_mm_panel, _, _ = centroid_px_to_mm_panel(
          detector[i_panel], scan, xyzobs_px_panel,
          flex.vec3_double(xyzobs_px_panel.size()),
          flex.vec3_double(xyzobs_px_panel.size()))
        xyzobs_mm.extend(xyzobs_mm_panel)

    if xyzobs_mm is not None:
      observed_xyz.extend(xyzobs_mm)
    if xyzcal_mm is not None:
      predicted_xyz.extend(xyzcal_mm)

  obs_x, obs_y, _ = observed_xyz.parts()
  pred_x, pred_y, _ = predicted_xyz.parts()

  try:
    import matplotlib

    if not params.output.show_plot:
      # http://matplotlib.org/faq/howto_faq.html#generate-images-without-having-a-window-appear
      matplotlib.use('Agg') # use a non-interactive backend
    from matplotlib import pyplot
  except ImportError:
    raise Sorry("matplotlib must be installed to generate a plot.")

  fig = pyplot.figure()
  fig.set_size_inches(params.output.size_inches)
  fig.set_dpi(params.output.dpi)
  pyplot.axes().set_aspect('equal')
  marker_size = params.output.marker_size
  if obs_x.size():
    pyplot.scatter(obs_x, obs_y, marker='o', c='white', s=marker_size, alpha=1)
  if pred_x.size():
    pyplot.scatter(pred_x, pred_y, marker='+', s=marker_size, c='blue')
  xmax = max([detector[i_panel].get_image_size_mm()[0] + panel_origin_shifts[i_panel][0]
              for i_panel in range(len(detector))])
  ymax = max([detector[i_panel].get_image_size_mm()[1] + panel_origin_shifts[i_panel][1]
              for i_panel in range(len(detector))])
  try:
    beam_centre = hierarchy.get_beam_centre(imageset.get_beam().get_s0())
  except Exception:
    beam_centre = detector[0].get_beam_centre(imageset.get_beam().get_s0())
  pyplot.scatter([beam_centre[0]], [beam_centre[1]], marker='+', c='blue', s=100)
  pyplot.xlim(0, xmax)
  pyplot.ylim(0, ymax)
  pyplot.gca().invert_yaxis()
  pyplot.title('Centroid x,y-coordinates')
  pyplot.xlabel('x-coordinate (mm)')
  pyplot.ylabel('y-coordinate (mm)')
  if params.output.file_name is not None:
    pyplot.savefig(params.output.file_name,
                   dpi=params.output.dpi,
                   bbox_inches='tight')
  if params.output.show_plot:
    pyplot.show()
Beispiel #55
0
def run(args):
  import os
  import libtbx.load_env
  from libtbx.utils import Sorry
  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  usage = "%s [options] datablock.json | image.cbf" %libtbx.env.dispatcher_name

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    read_datablocks_from_images=True,
    check_format=True,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)

  datablocks = flatten_datablocks(params.input.datablock)
  if len(datablocks) == 0:
    parser.print_help()
    exit(0)

  imagesets = datablocks[0].extract_imagesets()

  brightness = params.brightness / 100
  vendortype = "made up"

  # check that binning is a power of 2
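  # n & (n - 1) clears the lowest set bit, so it is zero exactly for powers
  # of two: e.g. 8 & 7 == 0, while 6 & 5 == 4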
  binning = params.binning
  if not (binning > 0 and ((binning & (binning - 1)) == 0)):
    raise Sorry("binning must be a power of 2")

  output_dir = params.output_dir
  if output_dir is None:
    output_dir = "."
  elif not os.path.exists(output_dir):
    os.makedirs(output_dir)

  from rstbx.slip_viewer.tile_generation \
       import _get_flex_image, _get_flex_image_multipanel

  for imageset in imagesets:
    detector = imageset.get_detector()
    panel = detector[0]
    # XXX is this inclusive or exclusive?
    saturation = panel.get_trusted_range()[1]
    if params.saturation:
      saturation = params.saturation
    for i_image, image in enumerate(imageset):

      if len(detector) == 1:
        image = [image]

      mask = imageset.get_mask(i_image)
      if mask is None:
        mask = [p.get_trusted_range_mask(im) for im, p in zip(image, detector)]

      image = image_filter(image, mask, display=params.display, gain_value=params.gain,
                           nsigma_b=params.nsigma_b,
                           nsigma_s=params.nsigma_s,
                           global_threshold=params.global_threshold,
                           min_local=params.min_local,
                           kernel_size=params.kernel_size)

      if len(detector) > 1:
        # FIXME This doesn't work properly, as flex_image.size2() is incorrect
        # also binning doesn't work
        assert binning == 1
        flex_image = _get_flex_image_multipanel(
          brightness=brightness,
          panels=detector,
          raw_data=image)
      else:
        flex_image = _get_flex_image(
          brightness=brightness,
          data=image[0],
          binning=binning,
          saturation=saturation,
          vendortype=vendortype)

      flex_image.setWindow(0, 0, 1)
      flex_image.adjust(color_scheme=colour_schemes.get(params.colour_scheme))

      # now export as a bitmap
      flex_image.prep_string()
      import Image
      # XXX is size//binning safe here?
      pil_img = Image.fromstring(
        'RGB', (flex_image.size2()//binning,
                flex_image.size1()//binning),
        flex_image.export_string)

      path = os.path.join(
        output_dir, params.prefix + ("%04d" % i_image) + '.' + params.format)

      print "Exporting %s" %path
      tmp_stream = open(path, 'wb')
      pil_img.save(tmp_stream, format=params.format)
      tmp_stream.close()
Beispiel #56
0
def run(args):

    from dials.util.options import OptionParser
    from dials.util.options import flatten_datablocks
    from dials.util.masking import GoniometerShadowMaskGenerator
    from libtbx.utils import Sorry
    import libtbx.load_env
    from scitbx.array_family import flex

    usage = "%s [options] datablock.json" % libtbx.env.dispatcher_name

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_datablocks=True,
        check_format=False,
        epilog=help_message,
    )

    params, options = parser.parse_args(show_diff_phil=True)
    datablocks = flatten_datablocks(params.input.datablock)
    if len(datablocks) == 0:
        parser.print_help()
        return

    imagesets = []
    for datablock in datablocks:
        imagesets.extend(datablock.extract_imagesets())

    for imageset in imagesets:
        import math

        height = params.height  # mm
        radius = params.radius  # mm

        steps_per_degree = 1

        theta = (
            flex.double(range(360 * steps_per_degree))
            * math.pi
            / 180
            / steps_per_degree
        )
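        # The shadow-casting hardware is modelled as a ring of points of the
        # given radius at the given height along the goniometer axis; the
        # origin (0, 0, 0) is prepended as the axis point below.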
        y = radius * flex.cos(theta)
        z = radius * flex.sin(theta)
        x = flex.double(theta.size(), height)

        coords = flex.vec3_double(zip(x, y, z))
        coords.insert(0, (0, 0, 0))

        gonio = imageset.get_goniometer()
        scan = imageset.get_scan()
        beam = imageset.get_beam()
        detector = imageset.get_detector()

        if params.angle is not None:
            angle = params.angle
        else:
            angle = scan.get_oscillation()[0]
        gonio_masker = GoniometerShadowMaskGenerator(
            gonio, coords, flex.size_t(len(coords), 0)
        )

        from matplotlib import pyplot as plt

        if params.output.animation is not None:

            import matplotlib.animation as manimation

            import os.path

            ext = os.path.splitext(params.output.animation)
            metadata = dict(
                title="Movie Test", artist="Matplotlib", comment="Movie support!"
            )
            if ext[1] == ".mp4":
                FFMpegWriter = manimation.writers["ffmpeg"]
                writer = FFMpegWriter(fps=15, metadata=metadata)
            elif ext[1] == ".gif":
                ImagemagickWriter = manimation.writers["imagemagick_file"]
                writer = ImagemagickWriter(fps=15, metadata=metadata)
            else:
                raise Sorry("animation format must be .mp4 or .gif")

            fig = plt.figure()
            (l,) = plt.plot([], [], c="r", marker=None)
            plt.axes().set_aspect("equal")
            plt.xlim(0, detector[0].get_image_size()[0])
            plt.ylim(0, detector[0].get_image_size()[1])
            plt.gca().invert_yaxis()
            title = plt.axes().set_title("")

            with writer.saving(fig, params.output.animation, 100):
                start, end = scan.get_array_range()
                step_size = 5
                for i in range(start, end, step_size):
                    angle = scan.get_angle_from_array_index(i)
                    shadow_boundary = gonio_masker.project_extrema(detector, angle)
                    x, y = shadow_boundary[0].parts()
                    l.set_data(x.as_numpy_array(), y.as_numpy_array())
                    title.set_text("scan_angle = %.1f degrees" % angle)
                    writer.grab_frame()

            plt.close()

        shadow_boundary = gonio_masker.project_extrema(detector, angle)

        with open("shadow.phil", "wb") as f:
            print("untrusted {", file=f)
            print("  polygon = \\", file=f)
            for c in shadow_boundary[0]:
                print("    %0.f %.0f \\" % (max(c[0], 0), max(c[1], 0)), file=f)
            print("}", file=f)

        import matplotlib.pyplot as plt

        fig = plt.figure()

        x, y, z = coords.parts()
        plt.scatter(x.as_numpy_array(), y.as_numpy_array())
        plt.axes().set_aspect("equal")
        plt.xlabel("x (gonio axis)")
        plt.ylabel("y (perpendicular to beam)")
        plt.savefig("gonio_xy.png")

        plt.figure()
        plt.scatter(y.as_numpy_array(), z.as_numpy_array())
        plt.axes().set_aspect("equal")
        plt.xlabel("y (perpendicular to beam)")
        plt.ylabel("z (towards beam))")
        plt.savefig("gonio_yz.png")

        plt.figure()
        plt.scatter(z.as_numpy_array(), x.as_numpy_array())
        plt.axes().set_aspect("equal")
        plt.xlabel("z (towards beam)")
        plt.ylabel("x (gonio axis)")
        plt.savefig("gonio_zx.png")

        for p_id in range(len(detector)):
            x, y = shadow_boundary[p_id].parts()
            fig = plt.figure()
            plt.scatter(x.as_numpy_array(), y.as_numpy_array(), c="r", s=1, marker="x")
            plt.axes().set_aspect("equal")
            plt.xlim(0, detector[p_id].get_image_size()[0])
            plt.ylim(0, detector[p_id].get_image_size()[1])
            plt.gca().invert_yaxis()
            plt.savefig("shadow.png")
Beispiel #57
0
  def run(self):
    ''' Parse the options. '''
    from dials.util.options import flatten_experiments, flatten_datablocks, flatten_reflections
    import math
    from scitbx.matrix import col, sqr
    from dials.array_family import flex
    # iterate_detector_at_level, iterate_panels, id_from_name, get_center and
    # get_center_lab are assumed to be defined at module level.
    # Parse the command line arguments
    params, options = self.parser.parse_args(show_diff_phil=True)
    self.params = params
    experiments = flatten_experiments(params.input.experiments)
    datablocks = flatten_datablocks(params.input.datablock)
    reflections = flatten_reflections(params.input.reflections)

    # Find all detector objects
    detectors = []
    detectors.extend(experiments.detectors())
    dbs = []
    for datablock in datablocks:
      dbs.extend(datablock.unique_detectors())
    detectors.extend(dbs)

    # Verify inputs
    if len(detectors) != 2:
      print "Please provide two experiments and or datablocks for comparison"
      return

    # These lines exercise the iterate_detector_at_level and iterate_panels functions
    # for a detector with 4 hierarchy levels
    """
    print "Testing iterate_detector_at_level"
    for level in xrange(4):
      print "iterating at level", level
      for panelg in iterate_detector_at_level(detectors[0].hierarchy(), 0, level):
        print panelg.get_name()

    print "Testing iterate_panels"
    for level in xrange(4):
      print "iterating at level", level
      for panelg in iterate_detector_at_level(detectors[0].hierarchy(), 0, level):
        for panel in iterate_panels(panelg):
          print panel.get_name()
    """
    tmp = []
    for refls in reflections:
      print "N reflections total:", len(refls)
      sel = refls.get_flags(refls.flags.used_in_refinement)
      if sel.count(True) > 0:
        refls = refls.select(sel)
        print "N reflections used in refinement", len(refls)
        print "Reporting only on those reflections used in refinement"

      refls['difference_vector_norms'] = (refls['xyzcal.mm']-refls['xyzobs.mm.value']).norms()
      tmp.append(refls)
    reflections = tmp

    s0 = col(flex.vec3_double([col(b.get_s0()) for b in experiments.beams()]).mean())

    # Compute a set of radial and transverse displacements for each reflection
    print "Setting up stats..."
    tmp_refls = []
    for refls, expts in zip(reflections, [wrapper.data for wrapper in params.input.experiments]):
      tmp = flex.reflection_table()
      assert len(expts.detectors()) == 1
      dect = expts.detectors()[0]
      # Need to construct a variety of vectors
      for panel_id, panel in enumerate(dect):
        panel_refls = refls.select(refls['panel'] == panel_id)
        bcl = flex.vec3_double()
        # Compute the beam center in lab space (a vector pointing from the origin to where the beam would intersect
        # the panel, if it did intersect the panel)
        for expt_id in set(panel_refls['id']):
          beam = expts[expt_id].beam
          s0_ = beam.get_s0()
          expt_refls = panel_refls.select(panel_refls['id'] == expt_id)
          beam_centre = panel.get_beam_centre_lab(s0_)
          bcl.extend(flex.vec3_double(len(expt_refls), beam_centre))
        panel_refls['beam_centre_lab'] = bcl

        # Compute obs in lab space
        x, y, _ = panel_refls['xyzobs.mm.value'].parts()
        c = flex.vec2_double(x, y)
        panel_refls['obs_lab_coords'] = panel.get_lab_coord(c)
        # Compute deltaXY in panel space. This vector is relative to the panel origin
        x, y, _ = (panel_refls['xyzcal.mm'] - panel_refls['xyzobs.mm.value']).parts()
        # Convert deltaXY to lab space, subtracting off the panel origin
        panel_refls['delta_lab_coords'] = panel.get_lab_coord(flex.vec2_double(x,y)) - panel.get_origin()
        tmp.extend(panel_refls)
      refls = tmp
      # The radial vector points from the center of the reflection to the beam center
      radial_vectors = (refls['obs_lab_coords'] - refls['beam_centre_lab']).each_normalize()
      # The transverse vector is orthogonal to the radial vector and the beam vector
      transverse_vectors = radial_vectors.cross(refls['beam_centre_lab']).each_normalize()
      # Compute the radial and transverse components of each deltaXY
      refls['radial_displacements']     = refls['delta_lab_coords'].dot(radial_vectors)
      refls['transverse_displacements'] = refls['delta_lab_coords'].dot(transverse_vectors)
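      # displacements are in mm here; the RMSD tables below convert to
      # microns by multiplying by 1000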

      tmp_refls.append(refls)
    reflections = tmp_refls

    # storage for plots
    refl_counts = {}

    # Data for all tables
    pg_bc_dists = flex.double()
    root1 = detectors[0].hierarchy()
    root2 = detectors[1].hierarchy()
    all_weights = flex.double()
    all_refls_count = flex.int()

    # Data for lab space table
    lab_table_data = []
    lab_delta_table_data = []
    all_lab_x = flex.double()
    all_lab_y = flex.double()
    all_lab_z = flex.double()
    pg_lab_x_sigmas = flex.double()
    pg_lab_y_sigmas = flex.double()
    pg_lab_z_sigmas = flex.double()
    all_rotX = flex.double()
    all_rotY = flex.double()
    all_rotZ = flex.double()
    pg_rotX_sigmas = flex.double()
    pg_rotY_sigmas = flex.double()
    pg_rotZ_sigmas = flex.double()
    all_delta_x = flex.double()
    all_delta_y = flex.double()
    all_delta_z = flex.double()
    all_delta_xy = flex.double()
    all_delta_xyz = flex.double()
    all_delta_r = flex.double()
    all_delta_t = flex.double()
    all_delta_norm = flex.double()

    if params.hierarchy_level > 0:
      # Data for local table
      local_table_data = []
      local_delta_table_data = []
      all_local_x = flex.double()
      all_local_y = flex.double()
      all_local_z = flex.double()
      pg_local_x_sigmas = flex.double()
      pg_local_y_sigmas = flex.double()
      pg_local_z_sigmas = flex.double()
      all_local_rotX = flex.double()
      all_local_rotY = flex.double()
      all_local_rotZ = flex.double()
      pg_local_rotX_sigmas = flex.double()
      pg_local_rotY_sigmas = flex.double()
      pg_local_rotZ_sigmas = flex.double()
      all_local_delta_x = flex.double()
      all_local_delta_y = flex.double()
      all_local_delta_z = flex.double()
      all_local_delta_xy = flex.double()
      all_local_delta_xyz = flex.double()

    # Data for RMSD table
    rmsds_table_data = []

    for pg_id, (pg1, pg2) in enumerate(zip(iterate_detector_at_level(root1, 0, params.hierarchy_level),
                                           iterate_detector_at_level(root2, 0, params.hierarchy_level))):
      # Count up the number of reflections in this panel group pair for use as a weighting scheme
      total_refls = 0
      pg1_refls = 0
      pg2_refls = 0
      for p1, p2 in zip(iterate_panels(pg1), iterate_panels(pg2)):
        r1 = len(reflections[0].select(reflections[0]['panel'] == id_from_name(detectors[0], p1.get_name())))
        r2 = len(reflections[1].select(reflections[1]['panel'] == id_from_name(detectors[1], p2.get_name())))
        total_refls += r1 + r2
        pg1_refls += r1
        pg2_refls += r2
      if pg1_refls == 0 and pg2_refls == 0:
        print "No reflections on panel group", pg_id
        continue
      all_refls_count.append(total_refls)
      all_weights.append(pg1_refls)
      all_weights.append(pg2_refls)

      assert pg1.get_name() == pg2.get_name()
      refl_counts[pg1.get_name()] = total_refls

      # Compute RMSDs
      row = ["%d"%pg_id]
      for pg, refls, det in zip([pg1, pg2], reflections, detectors):
        pg_refls = flex.reflection_table()
        for p in iterate_panels(pg):
          pg_refls.extend(refls.select(refls['panel'] == id_from_name(det, p.get_name())))
        if len(pg_refls) == 0:
          rmsd = r_rmsd = t_rmsd = 0
        else:
          rmsd = math.sqrt(flex.sum_sq(pg_refls['difference_vector_norms'])/len(pg_refls))*1000
          r_rmsd = math.sqrt(flex.sum_sq(pg_refls['radial_displacements'])/len(pg_refls))*1000
          t_rmsd = math.sqrt(flex.sum_sq(pg_refls['transverse_displacements'])/len(pg_refls))*1000

        row.extend(["%6.1f"%rmsd, "%6.1f"%r_rmsd, "%6.1f"%t_rmsd, "%8d"%len(pg_refls)])
      rmsds_table_data.append(row)

      dists = flex.double()
      lab_x = flex.double()
      lab_y = flex.double()
      lab_z = flex.double()
      rot_X = flex.double()
      rot_Y = flex.double()
      rot_Z = flex.double()

      for pg in [pg1, pg2]:
        bc = col(pg.get_beam_centre_lab(s0))
        ori = get_center(pg)

        dists.append((ori-bc).length())

        ori_lab = pg.get_origin()
        lab_x.append(ori_lab[0])
        lab_y.append(ori_lab[1])
        lab_z.append(ori_lab[2])

        f = col(pg.get_fast_axis())
        s = col(pg.get_slow_axis())
        n = col(pg.get_normal())
        basis = sqr([f[0], s[0], n[0],
                     f[1], s[1], n[1],
                     f[2], s[2], n[2]])
        rotX, rotY, rotZ = basis.r3_rotation_matrix_as_x_y_z_angles(deg=True)
        rot_X.append(rotX)
        rot_Y.append(rotY)
        rot_Z.append(rotZ)

      all_lab_x.extend(lab_x)
      all_lab_y.extend(lab_y)
      all_lab_z.extend(lab_z)
      all_rotX.extend(rot_X)
      all_rotY.extend(rot_Y)
      all_rotZ.extend(rot_Z)

      pg_weights = flex.double([pg1_refls, pg2_refls])
      if 0 in pg_weights:
        dist_m = dist_s = 0
        lx_m = lx_s = ly_m = ly_s = lz_m = lz_s = 0
        lrx_m = lrx_s = lry_m = lry_s = lrz_m = lrz_s = 0
        dx = dy = dz = dxy = dxyz = dr = dt = dnorm = 0
      else:
        stats = flex.mean_and_variance(dists, pg_weights)
        dist_m = stats.mean()
        dist_s = stats.gsl_stats_wsd()
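        # gsl_stats_wsd() is the weighted standard deviation, with the
        # per-panel-group reflection counts as weights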

        stats = flex.mean_and_variance(lab_x, pg_weights)
        lx_m = stats.mean()
        lx_s = stats.gsl_stats_wsd()

        stats = flex.mean_and_variance(lab_y, pg_weights)
        ly_m = stats.mean()
        ly_s = stats.gsl_stats_wsd()

        stats = flex.mean_and_variance(lab_z, pg_weights)
        lz_m = stats.mean()
        lz_s = stats.gsl_stats_wsd()

        stats = flex.mean_and_variance(rot_X, pg_weights)
        lrx_m = stats.mean()
        lrx_s = stats.gsl_stats_wsd()

        stats = flex.mean_and_variance(rot_Y, pg_weights)
        lry_m = stats.mean()
        lry_s = stats.gsl_stats_wsd()

        stats = flex.mean_and_variance(rot_Z, pg_weights)
        lrz_m = stats.mean()
        lrz_s = stats.gsl_stats_wsd()

        dx = lab_x[0] - lab_x[1]
        dy = lab_y[0] - lab_y[1]
        dz = lab_z[0] - lab_z[1]
        dxy = math.sqrt(dx**2+dy**2)
        dxyz = math.sqrt(dx**2+dy**2+dz**2)

        delta = col([lab_x[0], lab_y[0], lab_z[0]]) - col([lab_x[1], lab_y[1], lab_z[1]])
        pg1_center = get_center_lab(pg1).normalize()
        transverse = s0.cross(pg1_center).normalize()
        radial = transverse.cross(s0).normalize()
        dr = delta.dot(radial)
        dt = delta.dot(transverse)
        dnorm = col(pg1.get_normal()).angle(col(pg2.get_normal()), deg=True)

      pg_bc_dists.append(dist_m)
      pg_lab_x_sigmas.append(lx_s)
      pg_lab_y_sigmas.append(ly_s)
      pg_lab_z_sigmas.append(lz_s)
      pg_rotX_sigmas.append(lrx_s)
      pg_rotY_sigmas.append(lry_s)
      pg_rotZ_sigmas.append(lrz_s)
      all_delta_x.append(dx)
      all_delta_y.append(dy)
      all_delta_z.append(dz)
      all_delta_xy.append(dxy)
      all_delta_xyz.append(dxyz)
      all_delta_r.append(dr)
      all_delta_t.append(dt)
      all_delta_norm.append(dnorm)

      lab_table_data.append(["%d"%pg_id, "%5.1f"%dist_m,
                             "%9.3f"%lx_m, "%9.3f"%lx_s,
                             "%9.3f"%ly_m, "%9.3f"%ly_s,
                             "%9.3f"%lz_m, "%9.3f"%lz_s,
                             "%9.3f"%lrx_m, "%9.3f"%lrx_s,
                             "%9.3f"%lry_m, "%9.3f"%lry_s,
                             "%9.3f"%lrz_m, "%9.3f"%lrz_s,
                             "%6d"%total_refls])

      lab_delta_table_data.append(["%d"%pg_id, "%5.1f"%dist_m,
                                   "%9.1f"%(dx*1000), "%9.1f"%(dy*1000), "%9.3f"%dz, "%9.1f"%(dxy*1000), "%9.3f"%dxyz,
                                   "%9.1f"%(dr*1000), "%9.1f"%(dt*1000), "%9.3f"%dnorm,
                                   "%6d"%total_refls])

      if params.hierarchy_level > 0:
        local_x = flex.double()
        local_y = flex.double()
        local_z = flex.double()
        l_rot_X = flex.double()
        l_rot_Y = flex.double()
        l_rot_Z = flex.double()
        l_dx = flex.double()
        l_dy = flex.double()
        l_dz = flex.double()
        l_dxy = flex.double()
        l_dxyz = flex.double()

        for pg in [pg1, pg2]:

          l_ori = pg.get_local_origin()
          local_x.append(l_ori[0])
          local_y.append(l_ori[1])
          local_z.append(l_ori[2])

          f = col(pg.get_local_fast_axis())
          s = col(pg.get_local_slow_axis())
          n = f.cross(s)
          basis = sqr([f[0], s[0], n[0],
                       f[1], s[1], n[1],
                       f[2], s[2], n[2]])
          rotX, rotY, rotZ = basis.r3_rotation_matrix_as_x_y_z_angles(deg=True)
          l_rot_X.append(rotX)
          l_rot_Y.append(rotY)
          l_rot_Z.append(rotZ)

        all_local_x.extend(local_x)
        all_local_y.extend(local_y)
        all_local_z.extend(local_z)
        all_local_rotX.extend(l_rot_X)
        all_local_rotY.extend(l_rot_Y)
        all_local_rotZ.extend(l_rot_Z)

        pg_weights = flex.double([pg1_refls, pg2_refls])
        if 0 in pg_weights:
          lx_m = lx_s = ly_m = ly_s = lz_m = lz_s = 0
          lrx_m = lrx_s = lry_m = lry_s = lrz_m = lrz_s = 0
          ldx = ldy = ldz = ldxy = ldxyz = 0
        else:
          stats = flex.mean_and_variance(local_x, pg_weights)
          lx_m = stats.mean()
          lx_s = stats.gsl_stats_wsd()

          stats = flex.mean_and_variance(local_y, pg_weights)
          ly_m = stats.mean()
          ly_s = stats.gsl_stats_wsd()

          stats = flex.mean_and_variance(local_z, pg_weights)
          lz_m = stats.mean()
          lz_s = stats.gsl_stats_wsd()

          stats = flex.mean_and_variance(l_rot_X, pg_weights)
          lrx_m = stats.mean()
          lrx_s = stats.gsl_stats_wsd()

          stats = flex.mean_and_variance(l_rot_Y, pg_weights)
          lry_m = stats.mean()
          lry_s = stats.gsl_stats_wsd()

          stats = flex.mean_and_variance(l_rot_Z, pg_weights)
          lrz_m = stats.mean()
          lrz_s = stats.gsl_stats_wsd()

          ldx = local_x[0] - local_x[1]
          ldy = local_y[0] - local_y[1]
          ldz = local_z[0] - local_z[1]
          ldxy = math.sqrt(ldx**2+ldy**2)
          ldxyz = math.sqrt(ldx**2+ldy**2+ldz**2)

        pg_local_x_sigmas.append(lx_s)
        pg_local_y_sigmas.append(ly_s)
        pg_local_z_sigmas.append(lz_s)
        pg_local_rotX_sigmas.append(lrx_s)
        pg_local_rotY_sigmas.append(lry_s)
        pg_local_rotZ_sigmas.append(lrz_s)
        all_local_delta_x.append(ldx)
        all_local_delta_y.append(ldy)
        all_local_delta_z.append(ldz)
        all_local_delta_xy.append(ldxy)
        all_local_delta_xyz.append(ldxyz)

        local_table_data.append(["%d"%pg_id, "%5.1f"%dist_m,
                               "%9.3f"%lx_m, "%9.3f"%lx_s,
                               "%9.3f"%ly_m, "%9.3f"%ly_s,
                               "%9.3f"%lz_m, "%9.3f"%lz_s,
                               "%9.3f"%lrx_m, "%9.3f"%lrx_s,
                               "%9.3f"%lry_m, "%9.3f"%lry_s,
                               "%9.3f"%lrz_m, "%9.3f"%lrz_s,
                               "%6d"%total_refls])

        local_delta_table_data.append(["%d"%pg_id, "%5.1f"%dist_m,
                                       "%9.1f"%(ldx*1000), "%9.1f"%(ldy*1000), "%9.3f"%ldz, "%9.1f"%(ldxy*1000), "%9.3f"%ldxyz,
                                       "%6d"%total_refls])

    # Set up table output, starting with lab table
    table_d = {d:row for d, row in zip(pg_bc_dists, lab_table_data)}
    table_header = ["PanelG","Radial","Lab X","Lab X","Lab Y","Lab Y","Lab Z","Lab Z","Rot X","Rot X","Rot Y","Rot Y","Rot Z","Rot Z","N"]
    table_header2 = ["Id","Dist","","Sigma","","Sigma","","Sigma","","Sigma","","Sigma","","Sigma","Refls"]
    table_header3 = ["","(mm)","(mm)","(mm)","(mm)","(mm)","(mm)","(mm)","(deg)","(deg)","(deg)","(deg)","(deg)","(deg)",""]
    lab_table_data = [table_header, table_header2, table_header3]
    lab_table_data.extend([table_d[key] for key in sorted(table_d)])

    if len(all_weights) > 1:
      r1 = ["All"]
      r2 = ["Mean"]
      for data, weights, fmt in [[None,None,None],
                                 [all_lab_x,               all_weights.as_double(),     "%9.3f"],
                                 [pg_lab_x_sigmas,         all_refls_count.as_double(), "%9.3f"],
                                 [all_lab_y,               all_weights.as_double(),     "%9.3f"],
                                 [pg_lab_y_sigmas,         all_refls_count.as_double(), "%9.3f"],
                                 [all_lab_z,               all_weights.as_double(),     "%9.3f"],
                                 [pg_lab_z_sigmas,         all_refls_count.as_double(), "%9.3f"],
                                 [all_rotX,                all_weights.as_double(),     "%9.3f"],
                                 [pg_rotX_sigmas,          all_refls_count.as_double(), "%9.3f"],
                                 [all_rotY,                all_weights.as_double(),     "%9.3f"],
                                 [pg_rotY_sigmas,          all_refls_count.as_double(), "%9.3f"],
                                 [all_rotZ,                all_weights.as_double(),     "%9.3f"],
                                 [pg_rotZ_sigmas,          all_refls_count.as_double(), "%9.3f"]]:
        r2.append("")
        if data is None and weights is None:
          r1.append("")
          continue
        stats = flex.mean_and_variance(data, weights)
        r1.append(fmt%stats.mean())

      r1.append("")
      r2.append("%6.1f"%flex.mean(all_refls_count.as_double()))
      lab_table_data.append(r1)
      lab_table_data.append(r2)

    from libtbx import table_utils
    print "Detector statistics relative to lab origin"
    print table_utils.format(lab_table_data,has_header=3,justify='center',delim=" ")
    print "PanelG Id: panel group id or panel id, depending on hierarchy_level. For each panel group, weighted means and weighted standard deviations (Sigmas) for the properties listed below are computed using the matching panel groups between the input experiments."
    print "Radial dist: distance from center of panel group to the beam center"
    print "Lab X, Y and Z: mean coordinate in lab space"
    print "Rot X, Y and Z: rotation of panel group around lab X, Y and Z axes"
    print "N refls: number of reflections summed between both matching panel groups. This number is used as a weight when computing means and standard deviations."
    print "All: weighted mean of the values shown"
    print

    # Next, deltas in lab space
    table_d = {d:row for d, row in zip(pg_bc_dists, lab_delta_table_data)}
    table_header = ["PanelG","Radial","Lab dX","Lab dY","Lab dZ","Lab dXY","Lab dXYZ","Lab dR","Lab dT","Lab dNorm","N"]
    table_header2 = ["Id","Dist","","","","","","","","","Refls"]
    table_header3 = ["","(mm)","(microns)","(microns)","(mm)","(microns)","(mm)","(microns)","(microns)","(deg)",""]
    lab_delta_table_data = [table_header, table_header2, table_header3]
    lab_delta_table_data.extend([table_d[key] for key in sorted(table_d)])

    if len(all_weights) > 1:
      r1 = ["WMean"]
      r2 = ["WStddev"]
      r3 = ["Mean"]
      for data, weights, fmt in [[None,None,None],
                                 [all_delta_x*1000,          all_refls_count.as_double(),     "%9.1f"],
                                 [all_delta_y*1000,          all_refls_count.as_double(),     "%9.1f"],
                                 [all_delta_z,               all_refls_count.as_double(),     "%9.3f"],
                                 [all_delta_xy*1000,         all_refls_count.as_double(),     "%9.1f"],
                                 [all_delta_xyz,             all_refls_count.as_double(),     "%9.3f"],
                                 [all_delta_r*1000,          all_refls_count.as_double(),     "%9.1f"],
                                 [all_delta_t*1000,          all_refls_count.as_double(),     "%9.1f"],
                                 [all_delta_norm,            all_refls_count.as_double(),     "%9.3f"]]:
        r3.append("")
        if data is None and weights is None:
          r1.append("")
          r2.append("")
          continue
        stats = flex.mean_and_variance(data, weights)
        r1.append(fmt%stats.mean())
        if len(data) > 1:
          r2.append(fmt%stats.gsl_stats_wsd())
        else:
          r2.append("-")

      r1.append("")
      r2.append("")
      r3.append("%6.1f"%flex.mean(all_refls_count.as_double()))
      lab_delta_table_data.append(r1)
      lab_delta_table_data.append(r2)
      lab_delta_table_data.append(r3)

    print "Detector deltas in lab space"
    print table_utils.format(lab_delta_table_data,has_header=3,justify='center',delim=" ")
    print "PanelG Id: panel group id or panel id, depending on hierarchy_level. For each panel group, weighted means and weighted standard deviations (Sigmas) for the properties listed below are computed using the matching panel groups between the input experiments."
    print "Radial dist: distance from center of panel group to the beam center"
    print "Lab dX, dY and dZ: delta between X, Y and Z coordinates in lab space"
    print "Lab dR, dT and dZ: radial and transverse components of dXY in lab space"
    print "Lab dNorm: angle between normal vectors in lab space"
    print "N refls: number of reflections summed between both matching panel groups. This number is used as a weight when computing means and standard deviations."
    print "WMean: weighted mean of the values shown"
    print "WStddev: weighted standard deviation of the values shown"
    print "Mean: mean of the values shown"
    print

    if params.hierarchy_level > 0:
      # Local table
      table_d = {d:row for d, row in zip(pg_bc_dists, local_table_data)}
      table_header = ["PanelG","Radial","Local X","Local X","Local Y","Local Y","Local Z","Local Z","Rot X","Rot X","Rot Y","Rot Y","Rot Z","Rot Z","N"]
      table_header2 = ["Id","Dist","","Sigma","","Sigma","","Sigma","","Sigma","","Sigma","","Sigma","Refls"]
      table_header3 = ["","(mm)","(mm)","(mm)","(mm)","(mm)","(mm)","(mm)","(deg)","(deg)","(deg)","(deg)","(deg)","(deg)",""]
      local_table_data = [table_header, table_header2, table_header3]
      local_table_data.extend([table_d[key] for key in sorted(table_d)])

      if len(all_weights) > 1:
        r1 = ["All"]
        r2 = ["Mean"]
        for data, weights, fmt in [[None,None,None],
                                   [all_local_x,               all_weights.as_double(),     "%9.3f"],
                                   [pg_local_x_sigmas,         all_refls_count.as_double(), "%9.3f"],
                                   [all_local_y,               all_weights.as_double(),     "%9.3f"],
                                   [pg_local_y_sigmas,         all_refls_count.as_double(), "%9.3f"],
                                   [all_local_z,               all_weights.as_double(),     "%9.3f"],
                                   [pg_local_z_sigmas,         all_refls_count.as_double(), "%9.3f"],
                                   [all_local_rotX,            all_weights.as_double(),     "%9.3f"],
                                   [pg_local_rotX_sigmas,      all_refls_count.as_double(), "%9.3f"],
                                   [all_local_rotY,            all_weights.as_double(),     "%9.3f"],
                                   [pg_local_rotY_sigmas,      all_refls_count.as_double(), "%9.3f"],
                                   [all_local_rotZ,            all_weights.as_double(),     "%9.3f"],
                                   [pg_local_rotZ_sigmas,      all_refls_count.as_double(), "%9.3f"]]:
          r2.append("")
          if data is None and weights is None:
            r1.append("")
            continue
          stats = flex.mean_and_variance(data, weights)
          r1.append(fmt%stats.mean())

        r1.append("")
        r2.append("%6.1f"%flex.mean(all_refls_count.as_double()))
        local_table_data.append(r1)
        local_table_data.append(r2)

      print "Detector statistics in local frame of each panel group"
      print table_utils.format(local_table_data,has_header=3,justify='center',delim=" ")
      print "PanelG Id: panel group id or panel id, depending on hierarchy_level. For each panel group, weighted means and weighted standard deviations (Sigmas) for the properties listed below are computed using the matching panel groups between the input experiments."
      print "Radial dist: distance from center of panel group to the beam center"
      print "Lab X, Y and Z: mean coordinate in relative to parent panel group"
      print "Rot X, Y and Z: rotation of panel group around parent panel group X, Y and Z axes"
      print "N refls: number of reflections summed between both matching panel groups. This number is used as a weight when computing means and standard deviations."
      print "All: weighted mean of the values shown"
      print

      # Next, deltas in local space
      table_d = {d:row for d, row in zip(pg_bc_dists, local_delta_table_data)}
      table_header = ["PanelG","Radial","Local dX","Local dY","Local dZ","Local dXY","Local dXYZ","N"]
      table_header2 = ["Id","Dist","","","","","","Refls"]
      table_header3 = ["","(mm)","(microns)","(microns)","(mm)","(microns)","(mm)",""]
      local_delta_table_data = [table_header, table_header2, table_header3]
      local_delta_table_data.extend([table_d[key] for key in sorted(table_d)])

      if len(all_weights) > 1:
        r1 = ["WMean"]
        r2 = ["WStddev"]
        r3 = ["Mean"]
        for data, weights, fmt in [[None,None,None],
                                   [all_local_delta_x*1000,          all_refls_count.as_double(),     "%9.1f"],
                                   [all_local_delta_y*1000,          all_refls_count.as_double(),     "%9.1f"],
                                   [all_local_delta_z,               all_refls_count.as_double(),     "%9.3f"],
                                   [all_local_delta_xy*1000,         all_refls_count.as_double(),     "%9.1f"],
                                   [all_local_delta_xyz,             all_refls_count.as_double(),     "%9.3f"]]:
          r3.append("")
          if data is None and weights is None:
            r1.append("")
            r2.append("")
            continue
          stats = flex.mean_and_variance(data, weights)
          r1.append(fmt%stats.mean())
          r2.append(fmt%stats.gsl_stats_wsd())

        r1.append("")
        r2.append("")
        r3.append("%6.1f"%flex.mean(all_refls_count.as_double()))
        local_delta_table_data.append(r1)
        local_delta_table_data.append(r2)
        local_delta_table_data.append(r3)

      print "Detector deltas relative to panel group origin"
      print table_utils.format(local_delta_table_data,has_header=3,justify='center',delim=" ")
      print "PanelG Id: panel group id or panel id, depending on hierarchy_level. For each panel group, weighted means and weighted standard deviations (Sigmas) for the properties listed below are computed using the matching panel groups between the input experiments."
      print "Radial dist: distance from center of panel group to the beam center"
      print "Local dX, dY and dZ: delta between X, Y and Z coordinates in the local frame of the panel group"
      print "N refls: number of reflections summed between both matching panel groups. This number is used as a weight when computing means and standard deviations."
      print "All: weighted mean of the values shown"
      print

    #RMSD table
    table_d = {d:row for d, row in zip(pg_bc_dists, rmsds_table_data)}
    table_header = ["PanelG"]
    table_header2 = ["Id"]
    table_header3 = [""]
    for i in xrange(len(detectors)):
      table_header.extend(["D%d"%i]*4)
      table_header2.extend(["RMSD", "rRMSD", "tRMSD", "N refls"])
      table_header3.extend(["(microns)"]*3)
      table_header3.append("")
    rmsds_table_data = [table_header, table_header2, table_header3]
    rmsds_table_data.extend([table_d[key] for key in sorted(table_d)])

    row = ["Overall"]
    for refls in reflections:
      row.append("%6.1f"%(math.sqrt(flex.sum_sq(refls['difference_vector_norms'])/len(refls))*1000))
      row.append("%6.1f"%(math.sqrt(flex.sum_sq(refls['radial_displacements'])/len(refls))*1000))
      row.append("%6.1f"%(math.sqrt(flex.sum_sq(refls['transverse_displacements'])/len(refls))*1000))
      row.append("%8d"%len(refls))
    rmsds_table_data.append(row)

    print "RMSDs by detector number"
    print table_utils.format(rmsds_table_data,has_header=3,justify='center',delim=" ")
    print "PanelG Id: panel group id or panel id, depending on hierarchy_level"
    print "RMSD: root mean squared deviation between observed and predicted spot locations"
    print "rRMSD: RMSD of radial components of the observed-predicted vectors"
    print "tRMSD: RMSD of transverse components of the observed-predicted vectors"
    print "N refls: number of reflections"

    if params.tag is None:
      tag = ""
    else:
      tag = "%s "%params.tag

    if params.show_plots:
      # Plot the results
      self.detector_plot_dict(detectors[0], refl_counts, u"%sN reflections"%tag, u"%6d", show=False)
Beispiel #58
0
def run(args):
    import libtbx.load_env
    from dials.util.options import OptionParser
    from dials.util.options import flatten_reflections
    from dials.util.options import flatten_datablocks
    from dials.util.options import flatten_experiments
    from dials.algorithms.spot_finding import per_image_analysis
    usage = "%s [options] datablock.json strong.pickle" % libtbx.env.dispatcher_name

    parser = OptionParser(usage=usage,
                          read_reflections=True,
                          read_datablocks=True,
                          read_experiments=True,
                          phil=phil_scope,
                          check_format=False,
                          epilog=help_message)
    from libtbx.utils import Sorry

    params, options = parser.parse_args(show_diff_phil=False)
    reflections = flatten_reflections(params.input.reflections)
    datablocks = flatten_datablocks(params.input.datablock)
    experiments = flatten_experiments(params.input.experiments)

    if not any([reflections, experiments, datablocks]):
        parser.print_help()
        return

    if len(reflections) != 1:
        raise Sorry('exactly 1 reflection table must be specified')
    if len(datablocks) != 1:
        if experiments:
            if len(experiments.imagesets()) != 1:
                raise Sorry('exactly 1 datablock must be specified')
            imageset = experiments.imagesets()[0]
        else:
            raise Sorry('exactly 1 datablock must be specified')
    else:
        imageset = datablocks[0].extract_imagesets()[0]

    reflections = reflections[0]

    if params.id is not None:
        reflections = reflections.select(reflections['id'] == params.id)

    stats = per_image_analysis.stats_imageset(
        imageset,
        reflections,
        resolution_analysis=params.resolution_analysis,
        plot=params.individual_plots)
    per_image_analysis.print_table(stats)

    from libtbx import table_utils
    overall_stats = per_image_analysis.stats_single_image(
        imageset, reflections, resolution_analysis=params.resolution_analysis)
    rows = [
        ("Overall statistics", ""),
        ("#spots", "%i" % overall_stats.n_spots_total),
        ("#spots_no_ice", "%i" % overall_stats.n_spots_no_ice),
        #("total_intensity", "%.0f" %overall_stats.total_intensity),
        ("d_min", "%.2f" % overall_stats.estimated_d_min),
        ("d_min (distl method 1)",
         "%.2f (%.2f)" % (overall_stats.d_min_distl_method_1,
                          overall_stats.noisiness_method_1)),
        ("d_min (distl method 2)",
         "%.2f (%.2f)" % (overall_stats.d_min_distl_method_1,
                          overall_stats.noisiness_method_1)),
    ]
    print table_utils.format(rows, has_header=True, prefix="| ", postfix=" |")

    if params.json is not None:
        import json
        with open(params.json, 'wb') as fp:
            json.dump(stats.__dict__, fp)
    if params.plot is not None:
        per_image_analysis.plot_stats(stats, filename=params.plot)
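Since stats.__dict__ is serialised as plain JSON above, the per-image numbers can be reloaded later without any DIALS dependency. A small sketch, assuming the dump was written to stats.json and holds per-image lists keyed by names such as n_spots_total (the exact keys depend on what per_image_analysis.stats_imageset returns):

import json

with open('stats.json') as fp:  # the file written when params.json is set
    stats = json.load(fp)

# Each entry is expected to be a per-image list, e.g. spot counts per frame.
for i, n in enumerate(stats.get('n_spots_total', [])):
    print('image %i: %i spots' % (i + 1, n))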
Beispiel #59
0
def run(args):
    from dials.util import log
    # Imports needed by this excerpt (not shown in the original snippet);
    # phil_scope, help_message, dps_phil_scope and
    # discover_better_experimental_model are assumed to be defined elsewhere
    # in the module.
    import libtbx.load_env
    from dials.util.options import OptionParser, flatten_datablocks, flatten_reflections
    from dials.array_family import flex
    import logging
    logger = logging.getLogger(__name__)
    usage = "%s [options] datablock.json strong.pickle" % libtbx.env.dispatcher_name

    parser = OptionParser(usage=usage,
                          phil=phil_scope,
                          read_datablocks=True,
                          read_reflections=True,
                          check_format=False,
                          epilog=help_message)

    params, options = parser.parse_args(show_diff_phil=False)
    datablocks = flatten_datablocks(params.input.datablock)
    reflections = flatten_reflections(params.input.reflections)

    if len(datablocks) == 0 or len(reflections) == 0:
        parser.print_help()
        exit(0)

    # Configure the logging
    log.config(info=params.output.log, debug=params.output.debug_log)

    # Log the diff phil
    diff_phil = parser.diff_phil.as_str()
    if diff_phil != '':
        logger.info('The following parameters have been modified:\n')
        logger.info(diff_phil)

    if params.seed is not None:
        import random
        flex.set_random_seed(params.seed)
        random.seed(params.seed)

    imagesets = []
    for datablock in datablocks:
        imagesets.extend(datablock.extract_imagesets())

    assert len(imagesets) > 0
    assert len(reflections) == len(imagesets)

    if params.scan_range is not None and len(params.scan_range) > 0:
        reflections = [
            filter_reflections_by_scan_range(refl, params.scan_range)
            for refl in reflections
        ]

    dps_params = dps_phil_scope.extract()
    # for development, we want an exhaustive plot of beam probability map:
    dps_params.indexing.plot_search_scope = params.plot_search_scope
    dps_params.indexing.mm_search_scope = params.mm_search_scope

    for i in range(params.n_macro_cycles):
        if params.n_macro_cycles > 1:
            logger.info('Starting macro cycle %i' % (i + 1))
        new_detector, new_beam = discover_better_experimental_model(
            imagesets,
            reflections,
            params,
            dps_params,
            nproc=params.nproc,
            wide_search_binning=params.wide_search_binning)
        for imageset in imagesets:
            imageset.set_detector(new_detector)
            imageset.set_beam(new_beam)
        logger.info('')

    from dxtbx.serialize import dump
    logger.info("Saving optimized datablock to %s" % params.output.datablock)
    # Save all datablocks, not just the last one left over from the loop above.
    dump.datablock(datablocks, params.output.datablock)
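filter_reflections_by_scan_range is defined outside this excerpt. A plausible sketch of its behaviour, assuming each reflection stores its frame number as the z component of the 'xyzobs.px.value' centroid column and that half-open frame ranges are intended:

from dials.array_family import flex

def filter_reflections_by_scan_range(reflections, scan_range):
    # Keep reflections whose observed frame falls inside any requested range.
    z = reflections['xyzobs.px.value'].parts()[2]
    keep = flex.bool(len(reflections), False)
    for start, end in scan_range:
        keep = keep | ((z >= start) & (z < end))
    return reflections.select(keep)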
Beispiel #60
0
  def run(self):
    ''' Compute the radial average of the summed image data. '''
    from dials.util.options import flatten_datablocks, flatten_experiments
    from dials.util import log
    import logging
    logger = logging.getLogger(__name__)

    # Parse the command line
    params, options = self.parser.parse_args(show_diff_phil=False)
    experiments = flatten_experiments(params.input.experiments)
    datablocks = flatten_datablocks(params.input.datablock)
    if len(experiments) == 0 and len(datablocks) == 0:
      self.parser.print_help()
      return

    if len(datablocks) > 0:
      assert len(datablocks) == 1
      imagesets = datablocks[0].extract_imagesets()
      assert len(imagesets) == 1
      imageset = imagesets[0]
      beam = imageset.get_beam()
      detector = imageset.get_detector()
    else:
      assert len(experiments) == 1
      imageset = experiments[0].imageset
      beam = experiments[0].beam
      detector = experiments[0].detector

    # Configure logging
    log.config()

    # Set the scan range
    if params.scan_range is None:
      scan_range = (0, len(imageset))
    else:
      scan_range = params.scan_range
      i0, i1 = scan_range
      if i0 < 0 or i1 > len(imageset):
        raise RuntimeError('Scan range outside image range')
      if i0 >= i1:
        raise RuntimeError('Invalid scan range')

    summed_data = None
    summed_mask = None

    # Loop through images
    for i in range(*scan_range):
      logger.info("Reading image %d" % i)

      # Read image
      data = imageset.get_raw_data(i)
      mask = imageset.get_mask(i)
      assert isinstance(data, tuple)
      assert isinstance(mask, tuple)

      if summed_data is None:
        summed_mask = mask
        summed_data = data
      else:
        summed_data = [ sd + d for sd, d in zip(summed_data, data) ]
        summed_mask = [ sm & m for sm, m in zip(summed_mask, mask) ]

    # Compute the bin count and the 1/d^2 limits
    if params.num_bins is None:
      num_bins = sum(sum(p.get_image_size()) for p in detector)
    else:
      num_bins = params.num_bins
    if params.d_max is None:
      vmin = 0
    else:
      vmin = (1.0 / params.d_max)**2
    if params.d_min is None:
      params.d_min = detector.get_max_resolution(beam.get_s0())
    vmax = (1.0 / params.d_min)**2

    # Print some info
    logger.info("Min 1/d^2: %f" % vmin)
    logger.info("Max 1/d^2: %f" % vmax)
    logger.info("Num bins:  %d" % num_bins)

    # Compute the radial average
    from dials.algorithms.background import RadialAverage
    radial_average = RadialAverage(beam, detector, vmin, vmax, num_bins)
    for d, m in zip(summed_data, summed_mask):
      radial_average.add(d.as_double(), m)
    mean = radial_average.mean()
    reso = radial_average.inv_d2()

    logger.info("Writing to %s" % params.output.filename)
    with open(params.output.filename, "w") as outfile:
      for r, m in zip(reso, mean):
        outfile.write("%f, %f\n" % (r, m))