Example #1
File: index.py Project: hainm/xia2
def run():
  try:
    check_environment()
    check()
  except Exception as e:
    traceback.print_exc(file=open('xia2.error', 'w'))
    Chatter.write('Status: error "%s"' % str(e))
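A side note on this pattern: the file handle from open('xia2.error', 'w') passed straight to traceback.print_exc is never closed explicitly. Later examples (see Example #28) use a context manager instead; a minimal sketch of that form, with plain print standing in for xia2's Chatter stream:

    import traceback

    def report_error(exc, path='xia2.error'):
        # Write the full traceback to the error file and close it promptly.
        with open(path, 'w') as fh:
            traceback.print_exc(file=fh)
        print('Status: error "%s"' % str(exc))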
Example #2
        def run(self):
            """Run dials.space_group"""
            self.clear_command_line()

            assert self._experiments_filename
            assert self._reflections_filename
            self.add_command_line(self._reflections_filename)
            self.add_command_line(self._experiments_filename)

            if not self._symmetrized_experiments:
                self._symmetrized_experiments = os.path.join(
                    self.get_working_directory(),
                    "%i_symmetrized.expt" % self.get_xpid(),
                )
            self.add_command_line(
                "output.experiments=%s" % self._symmetrized_experiments
            )

            self.start()
            self.close_wait()

            # check for errors

            try:
                self.check_for_errors()
            except Exception:
                Chatter.write(
                    "dials.space_group failed, see log file for more details:\n  %s"
                    % self.get_log_file()
                )
                raise

            Debug.write("dials.space_group status: OK")
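This example follows xia2's Driver wrapper lifecycle: reset the argument list, append positional and key=value arguments, start() the process, close_wait() for it to finish, then check_for_errors(). A schematic stand-in for that lifecycle using only the standard library (the class and its methods are illustrative, not the real xia2 Driver API):

    import subprocess

    class ProgramWrapper:
        def __init__(self, executable):
            self._executable = executable
            self._args = []

        def clear_command_line(self):
            self._args = []

        def add_command_line(self, arg):
            self._args.append(str(arg))

        def run(self):
            # start() and close_wait() collapsed into one blocking call
            result = subprocess.run([self._executable] + self._args,
                                    capture_output=True, text=True)
            if result.returncode != 0:  # crude check_for_errors()
                raise RuntimeError('%s failed:\n%s'
                                   % (self._executable, result.stderr))
            return result.stdout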
Example #3
def run():
    if os.path.exists('xia2-working.phil'):
        sys.argv.append('xia2-working.phil')
    try:
        check_environment()
    except Exception as e:
        traceback.print_exc(file=open('xia2.error', 'w'))
        Chatter.write('Status: error "%s"' % str(e))

    if len(sys.argv) < 2 or '-help' in sys.argv:
        help()
        sys.exit()

    wd = os.getcwd()

    try:
        multi_crystal_analysis()
        Chatter.write('Status: normal termination')

    except Exception as e:
        traceback.print_exc(file=open(os.path.join(wd, 'xia2.error'), 'w'))
        Chatter.write('Status: error "%s"' % str(e))
        Chatter.write(
            'Please send the contents of xia2.txt, xia2.error and xia2-debug.txt to:'
        )
        Chatter.write('*****@*****.**')
        sys.exit(1)
Example #4
    def _scale_finish_chunk_7_twinning(self):
        hklout = self._scalr_scaled_reflection_files['mtz']

        m = mtz.object(hklout)
        # FIXME in here should be able to just drop down to the lowest symmetry
        # space group with the rotational elements for this calculation? I.e.
        # P422 for P4/mmm?
        if not m.space_group().is_centric():
            from xia2.Toolkit.E4 import E4_mtz
            E4s = E4_mtz(hklout, native=True)
            self._scalr_twinning_score = list(E4s.items())[0][1]

            if self._scalr_twinning_score > 1.9:
                self._scalr_twinning_conclusion = 'Your data do not appear twinned'
            elif self._scalr_twinning_score < 1.6:
                self._scalr_twinning_conclusion = 'Your data appear to be twinned'
            else:
                self._scalr_twinning_conclusion = 'Ambiguous score (1.6 < score < 1.9)'

        else:
            self._scalr_twinning_conclusion = 'Data are centric'
            self._scalr_twinning_score = 0

        Chatter.write('Overall twinning score: %4.2f' %
                      self._scalr_twinning_score)
        Chatter.write(self._scalr_twinning_conclusion)
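The thresholds used here reflect the expected second moment of normalized intensities: for acentric data <E^4> is close to 2.0 when untwinned and close to 1.5 for a perfect twin, so a high score argues against twinning. The classification, extracted as a standalone sketch:

    def classify_twinning(e4_score):
        # <E^4> ~ 2.0 for untwinned acentric data, ~1.5 for a perfect twin
        if e4_score > 1.9:
            return 'Your data do not appear twinned'
        elif e4_score < 1.6:
            return 'Your data appear to be twinned'
        return 'Ambiguous score (1.6 < score < 1.9)'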
Example #5
def load_reference_geometries(geometry_file_list):
    from dxtbx.serialize import load

    reference_components = []
    for file in geometry_file_list:
        try:
            experiments = load.experiment_list(file, check_format=False)
            assert len(experiments.detectors()) == 1
            assert len(experiments.beams()) == 1
            reference_detector = experiments.detectors()[0]
            reference_beam = experiments.beams()[0]
        except Exception:
            experiments = load.experiment_list(file)
            imageset = experiments.imagesets()[0]
            reference_detector = imageset.get_detector()
            reference_beam = imageset.get_beam()
        reference_components.append(
            {"detector": reference_detector, "beam": reference_beam, "file": file}
        )

    import itertools

    for combination in itertools.combinations(reference_components, 2):
        if compare_geometries(combination[0]["detector"], combination[1]["detector"]):
            from xia2.Handlers.Streams import Chatter

            Chatter.write(
                "Reference geometries given in %s and %s are too similar"
                % (combination[0]["file"], combination[1]["file"])
            )
            raise Exception("Reference geometries too similar")
    return reference_components
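compare_geometries itself is not shown in these examples; it evidently returns True when two detector models are close enough to be redundant references. A hypothetical implementation along those lines, assuming the dxtbx is_similar_to comparison (the tolerances are illustrative, not the real xia2 values):

    def compare_geometries(detector_a, detector_b):
        # Hypothetical: flag the pair when the dxtbx detector models agree
        # within loose positional tolerances.
        return detector_a.is_similar_to(
            detector_b,
            fast_axis_tolerance=0.1,
            slow_axis_tolerance=0.1,
            origin_tolerance=0.1,
        )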
Example #6
def load_reference_geometries(geometry_file_list):
    from dxtbx.serialize import load

    reference_components = []
    for file in geometry_file_list:
        try:
            experiments = load.experiment_list(file, check_format=False)
            assert len(experiments.detectors()) == 1
            assert len(experiments.beams()) == 1
            reference_detector = experiments.detectors()[0]
            reference_beam = experiments.beams()[0]
        except Exception:
            datablock = load.datablock(file)
            assert len(datablock) == 1
            imageset = datablock[0].extract_imagesets()[0]
            reference_detector = imageset.get_detector()
            reference_beam = imageset.get_beam()
        reference_components.append({
            'detector': reference_detector,
            'beam': reference_beam,
            'file': file
        })

    import itertools
    for combination in itertools.combinations(reference_components, 2):
        if compare_geometries(combination[0]['detector'],
                              combination[1]['detector']):
            from xia2.Handlers.Streams import Chatter
            Chatter.write(
                'Reference geometries given in %s and %s are too similar' %
                (combination[0]['file'], combination[1]['file']))
            raise Exception('Reference geometries too similar')
    return reference_components
Example #7
def run():
    assert os.path.exists("xia2.json")
    from xia2.Schema.XProject import XProject

    xinfo = XProject.from_json(filename="xia2.json")
    Chatter.write(xinfo.get_output())
    write_citations()
Example #8
    def scale(self):
        '''Actually perform the scaling - this is delegated to the
    implementation.'''

        if self._scalr_integraters == {}:
            raise RuntimeError(
                'no Integrater implementations assigned for scaling')

        xname = self._scalr_xcrystal.get_name()

        while not self.get_scaler_finish_done():
            while not self.get_scaler_done():
                while not self.get_scaler_prepare_done():

                    Chatter.banner('Preparing %s' % xname)

                    self._scalr_prepare_done = True
                    self._scale_prepare()

                Chatter.banner('Scaling %s' % xname)

                self._scalr_done = True
                self._scalr_result = self._scale()

            self._scalr_finish_done = True
            self._scale_finish()

        return self._scalr_result
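The three nested "while not ...done()" loops implement a small state machine: each phase sets its own done flag before running, and any phase may clear an outer flag (for example after eliminating a lattice), which sends control back through the earlier phases until everything settles. A generic sketch of the pattern:

    def run_phases(prepare, do, finish, state):
        # state holds 'prepare'/'do'/'finish' done-flags; a phase may clear
        # an earlier flag to force that phase to run again.
        while not state['finish']:
            while not state['do']:
                while not state['prepare']:
                    state['prepare'] = True
                    prepare(state)
                state['do'] = True
                do(state)
            state['finish'] = True
            finish(state)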
Example #9
File: Scaler.py Project: hainm/xia2
  def scale(self):
    '''Actually perform the scaling - this is delegated to the
    implementation.'''

    if self._scalr_integraters == { }:
      raise RuntimeError(
            'no Integrater implementations assigned for scaling')

    xname = self._scalr_xcrystal.get_name()

    while not self.get_scaler_finish_done():
      while not self.get_scaler_done():
        while not self.get_scaler_prepare_done():

          Chatter.banner('Preparing %s' % xname)

          self._scalr_prepare_done = True
          self._scale_prepare()

        Chatter.banner('Scaling %s' % xname)

        self._scalr_done = True
        self._scalr_result = self._scale()

      self._scalr_finish_done = True
      self._scale_finish()

    return self._scalr_result
Example #10
File: rescale.py Project: xia2/xia2
def run():
  if os.path.exists('xia2-working.phil'):
    sys.argv.append('xia2-working.phil')
  try:
    check_environment()
  except Exception as e:
    traceback.print_exc(file=open('xia2.error', 'w'))
    Chatter.write('Status: error "%s"' % str(e))
Example #11
 def get_new_scales_file(self):
   '''Get the file to which the scales have been written.'''
   if self._new_scales_file:
     if not os.path.isfile(os.path.join(self.get_working_directory(), self._new_scales_file)):
       Chatter.write(
         "Aimless did not scale the data, see log file for more details:\n  %s" % self.get_log_file())
       raise RuntimeError('data not scaled')
   return os.path.join(self.get_working_directory(), self._new_scales_file)
Example #12
 def get_new_scales_file(self):
   '''Get the file to which the scales have been written.'''
   if self._new_scales_file:
     if not os.path.isfile(os.path.join(self.get_working_directory(), self._new_scales_file)):
       Chatter.write(
         "Aimless did not scale the data, see log file for more details:\n  %s" % self.get_log_file())
       raise RuntimeError('data not scaled')
   return os.path.join(self.get_working_directory(), self._new_scales_file)
Example #13
def which(pgm, debug=False):
  path = os.getenv('PATH')
  for p in path.split(os.path.pathsep):
    p = os.path.join(p, pgm)
    if debug:
      Chatter.write('Seeking %s' % p)
    if os.path.exists(p) and os.access(p, os.X_OK):
      return p
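Example #20 below carries the original comments, including the caveat that this does not work on Windows (where .exe/.bat extensions must be tried). On Python 3 the standard library covers both cases; a sketch of the equivalent:

    import shutil

    def which(pgm, debug=False):
        path = shutil.which(pgm)  # honours PATHEXT on Windows
        if debug and path:
            print('Found %s' % path)
        return path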
Example #14
  def _integrate(self):
    '''Implement the integrater interface.'''

    # cite the program
    Citations.cite('mosflm')

    images_str = '%d to %d' % tuple(self._intgr_wedge)
    cell_str = '%.2f %.2f %.2f %.2f %.2f %.2f' % tuple(self._intgr_cell)

    if len(self._fp_directory) <= 50:
      dirname = self._fp_directory
    else:
      dirname = '...%s' % self._fp_directory[-46:]

    Journal.block(
        'integrating', self._intgr_sweep_name, 'mosflm',
        {'images':images_str,
         'cell':cell_str,
         'lattice':self.get_integrater_refiner().get_refiner_lattice(),
         'template':self._fp_template,
         'directory':dirname,
         'resolution':'%.2f' % self._intgr_reso_high})

    self._mosflm_rerun_integration = False

    wd = self.get_working_directory()

    try:

      if self.get_integrater_sweep_name():
        pname, xname, dname = self.get_integrater_project_info()

      nproc = PhilIndex.params.xia2.settings.multiprocessing.nproc
      if nproc > 1:
        Debug.write('Parallel integration: %d jobs' %nproc)
        self._mosflm_hklout = self._mosflm_parallel_integrate()
      else:
        self._mosflm_hklout = self._mosflm_integrate()

      # record integration output for e.g. BLEND.

      sweep = self.get_integrater_sweep_name()
      if sweep:
        FileHandler.record_more_data_file(
            '%s %s %s %s INTEGRATE' % (pname, xname, dname, sweep),
            self._mosflm_hklout)

    except IntegrationError as e:
      if 'negative mosaic spread' in str(e):
        if self._mosflm_postref_fix_mosaic:
          Chatter.write(
              'Negative mosaic spread - stopping integration')
          raise BadLatticeError('negative mosaic spread')

        Chatter.write(
            'Negative mosaic spread - rerunning integration')
        self.set_integrater_done(False)
        self._mosflm_postref_fix_mosaic = True
Example #15
def run():
  from libtbx.utils import Sorry
  if len(sys.argv) < 2 or '-help' in sys.argv or '--help' in sys.argv:
    help()
    sys.exit()

  try:
    check_environment()
  except Exception as e:
    traceback.print_exc(file=open('xia2.error', 'w'))
    Chatter.write('Status: error "%s"' % str(e))
Example #16
  def _index_prepare(self):
    Chatter.banner('Spotfinding %s' %self.get_indexer_sweep_name())
    super(XDSIndexerII, self)._index_prepare()

    from dials.array_family import flex
    from dials.util.ascii_art import spot_counts_per_image_plot
    reflection_pickle = spot_xds_to_reflection_pickle(
      self._indxr_payload['SPOT.XDS'],
      working_directory=self.get_working_directory())
    refl = flex.reflection_table.from_pickle(reflection_pickle)
    Chatter.write(spot_counts_per_image_plot(refl), strip=False)
Example #17
    def _index_prepare(self):
        Chatter.banner('Spotfinding %s' % self.get_indexer_sweep_name())
        super(XDSIndexerII, self)._index_prepare()

        from dials.array_family import flex
        from dials.util.ascii_art import spot_counts_per_image_plot
        reflection_pickle = spot_xds_to_reflection_pickle(
            self._indxr_payload['SPOT.XDS'],
            working_directory=self.get_working_directory())
        refl = flex.reflection_table.from_pickle(reflection_pickle)
        Chatter.write(spot_counts_per_image_plot(refl), strip=False)
Example #18
File: Indexer.py Project: xia2/xia2
  def eliminate(self, indxr_print = True):
    '''Eliminate the highest currently allowed lattice.'''

    if len(self._sorted_list) <= 1:
      raise RuntimeError('cannot eliminate only solution')

    if indxr_print:
      Chatter.write('Eliminating indexing solution:')
      Chatter.write(self.repr()[0])

    self._sorted_list = self._sorted_list[1:]
Example #19
    def eliminate(self, indxr_print=True):
        """Eliminate the highest currently allowed lattice."""

        if len(self._sorted_list) <= 1:
            raise RuntimeError("cannot eliminate only solution")

        if indxr_print:
            Chatter.write("Eliminating indexing solution:")
            Chatter.write(self.repr()[0])

        self._sorted_list = self._sorted_list[1:]
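Example #25 below shows how eliminate is used in practice: when integration fails with a BadLatticeError, the top lattice solution is discarded and indexing restarts. A schematic retry loop under that assumption (the driver function and the exception stand-in are illustrative):

    class BadLatticeError(Exception):
        # stand-in for xia2's exception of the same name
        pass

    def integrate_with_fallback(indexer, integrate):
        # Drop the highest-symmetry solution after each failure; eliminate()
        # raises RuntimeError once only one solution remains.
        while True:
            try:
                return integrate()
            except BadLatticeError as e:
                print('Rejecting bad lattice %s' % str(e))
                indexer.eliminate()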
Example #20
def which(pgm, debug=False):
  # python equivalent to the 'which' command
  # http://stackoverflow.com/questions/9877462/is-there-a-python-equivalent-to-the-which-command
  # FIXME this will not work on Windows as you need to check that there is a
  # .bat or a .exe extension
  # FIXME also this is implemented in Driver/DriverHelper.py:executable_exists
  path = os.getenv('PATH')
  for p in path.split(os.path.pathsep):
    p = os.path.join(p, pgm)
    if debug:
      Chatter.write('Seeking %s' % p)
    if os.path.exists(p) and os.access(p, os.X_OK):
      return p
Example #21
    def _scale_finish_chunk_8_raddam(self):
        crd = CCP4InterRadiationDamageDetector()

        crd.set_working_directory(self.get_working_directory())

        crd.set_hklin(self._scalr_scaled_reflection_files['mtz'])

        if self.get_scaler_anomalous():
            crd.set_anomalous(True)

        hklout = os.path.join(self.get_working_directory(), 'temp.mtz')
        FileHandler.record_temporary_file(hklout)

        crd.set_hklout(hklout)

        status = crd.detect()

        if status:
            Chatter.write('')
            Chatter.banner('Local Scaling %s' % self._scalr_xname)
            for s in status:
                Chatter.write('%s %s' % s)
            Chatter.banner('')
        else:
            Debug.write('Local scaling failed')
Example #22
File: Refine.py Project: xia2/xia2
    def run(self):
      from xia2.Handlers.Streams import Chatter, Debug
      Debug.write('Running dials.refine')

      self.clear_command_line()
      self.add_command_line(self._experiments_filename)
      self.add_command_line(self._indexed_filename)
      self.add_command_line('scan_varying=%s' % self._scan_varying)
      if self._close_to_spindle_cutoff is not None:
        self.add_command_line(
          'close_to_spindle_cutoff=%f' %self._close_to_spindle_cutoff)
      if self._outlier_algorithm:
        self.add_command_line('outlier.algorithm=%s' % self._outlier_algorithm)
      self._refined_experiments_filename = os.path.join(
        self.get_working_directory(),
        '%s_refined_experiments.json' % self.get_xpid())
      self.add_command_line(
        'output.experiments=%s' % self._refined_experiments_filename)
      self._refined_filename = os.path.join(
        self.get_working_directory(), '%s_refined.pickle' % self.get_xpid())
      self.add_command_line('output.reflections=%s' % self._refined_filename)
      if self._reflections_per_degree is not None:
        self.add_command_line(
          'reflections_per_degree=%i' %self._reflections_per_degree)
      if self._interval_width_degrees is not None:
        self.add_command_line(
          'unit_cell.smoother.interval_width_degrees=%i' % self._interval_width_degrees)
        self.add_command_line(
          'orientation.smoother.interval_width_degrees=%i' % self._interval_width_degrees)
      if self._detector_fix:
        self.add_command_line('detector.fix=%s' % self._detector_fix)
      if self._beam_fix:
        self.add_command_line('beam.fix=%s' % self._beam_fix)
      if self._phil_file is not None:
        self.add_command_line('%s' %self._phil_file)

      self.start()
      self.close_wait()

      if not os.path.isfile(self._refined_filename) or \
         not os.path.isfile(self._refined_experiments_filename):
        Chatter.write(
          "DIALS did not refine the data, see log file for more details:\n  %s" %self.get_log_file())
        raise RuntimeError('data not refined')
      for record in self.get_all_output():
        if 'Sorry: Too few reflections to' in record:
          raise RuntimeError(record.strip())

      self.check_for_errors()
      return
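Stripped of the Driver machinery, the same invocation is just an argument list; a sketch assuming dials.refine is on PATH, with the option names taken from the example above:

    import subprocess

    def refine(experiments, reflections, out_prefix, scan_varying=True):
        cmd = [
            'dials.refine', experiments, reflections,
            'scan_varying=%s' % scan_varying,
            'output.experiments=%s_refined_experiments.json' % out_prefix,
            'output.reflections=%s_refined.pickle' % out_prefix,
        ]
        result = subprocess.run(cmd, capture_output=True, text=True)
        if result.returncode != 0:
            raise RuntimeError('data not refined:\n%s' % result.stderr)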
Example #23
def make_logfile_html(logfile):
    tables = extract_loggraph_tables(logfile)
    if not tables:
        return

    rst = []
    ## local files in xia2 distro
    #c3css = os.path.join(xia2_root_dir, 'c3', 'c3.css')
    #c3js = os.path.join(xia2_root_dir, 'c3', 'c3.min.js')
    #d3js = os.path.join(xia2_root_dir, 'd3', 'd3.min.js')

    # webhosted files
    c3css = 'https://cdnjs.cloudflare.com/ajax/libs/c3/0.4.10/c3.css'
    c3js = 'https://cdnjs.cloudflare.com/ajax/libs/c3/0.4.10/c3.min.js'
    d3js = 'https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js'

    rst.append('.. raw:: html')
    rst.append('\n    '.join([
        '', '<!-- Load c3.css -->',
        '<link href="%s" rel="stylesheet" type="text/css">' % c3css,
        '<!-- Load d3.js and c3.js -->',
        '<script src="%s" charset="utf-8"></script>' % d3js,
        '<script src="%s"></script>' % c3js, ''
    ]))

    for table in tables:
        try:
            #for graph_name, html in table_to_google_charts(table).iteritems():
            for graph_name, html in table_to_c3js_charts(table).items():
                #rst.append('.. __%s:\n' %graph_name)
                rst.append('.. raw:: html')
                rst.append('\n    '.join(html.split('\n')))
        except Exception as e:
            Chatter.write('=' * 80)
            Chatter.write('Error (%s) while processing table' % str(e))
            Chatter.write("  '%s'" % table.title)
            Chatter.write('in %s' % logfile)
            Chatter.write('=' * 80)
            Debug.write(
                'Exception raised while processing log file %s, table %s' %
                (logfile, table.title))
            Debug.write(traceback.format_exc())

    rst = '\n'.join(rst)

    html_file = '%s.html' % (os.path.splitext(logfile)[0])
    with open(html_file, 'wb') as f:
        f.write(rst2html(rst))
    return html_file
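For orientation, the first element appended to rst renders as a raw-HTML directive whose indented body loads the C3 stylesheet and the two scripts, roughly:

    .. raw:: html

        <!-- Load c3.css -->
        <link href="https://cdnjs.cloudflare.com/ajax/libs/c3/0.4.10/c3.css" rel="stylesheet" type="text/css">
        <!-- Load d3.js and c3.js -->
        <script src="https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js" charset="utf-8"></script>
        <script src="https://cdnjs.cloudflare.com/ajax/libs/c3/0.4.10/c3.min.js"></script>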
Example #24
def run():
    if os.path.exists("xia2-working.phil"):
        sys.argv.append("xia2-working.phil")
    try:
        check_environment()
    except Exception as e:
        traceback.print_exc(file=open("xia2.error", "w"))
        Chatter.write('Status: error "%s"' % str(e))

    # print the version
    Chatter.write(Version)
    Citations.cite("xia2")

    start_time = time.time()

    assert os.path.exists("xia2.json")
    from xia2.Schema.XProject import XProject

    xinfo = XProject.from_json(filename="xia2.json")

    crystals = xinfo.get_crystals()
    for crystal_id, crystal in crystals.items():
        # cwd = os.path.abspath(os.curdir)
        from libtbx import Auto

        scale_dir = PhilIndex.params.xia2.settings.scale.directory
        if scale_dir is Auto:
            scale_dir = "scale"
            i = 0
            while os.path.exists(os.path.join(crystal.get_name(), scale_dir)):
                i += 1
                scale_dir = "scale%i" % i
            PhilIndex.params.xia2.settings.scale.directory = scale_dir
        working_directory = Environment.generate_directory(
            [crystal.get_name(), scale_dir])
        # os.chdir(working_directory)

        crystals[crystal_id]._scaler = None  # reset scaler

        scaler = crystal._get_scaler()
        Chatter.write(xinfo.get_output())
        crystal.serialize()

    duration = time.time() - start_time

    # write out the time taken in a human readable way
    Chatter.write("Processing took %s" %
                  time.strftime("%Hh %Mm %Ss", time.gmtime(duration)))

    # delete all of the temporary mtz files...
    cleanup()

    write_citations()

    xinfo.as_json(filename="xia2.json")

    Environment.cleanup()
Example #25
  def integrate(self):
    '''Actually perform integration until we think we are done...'''

    while not self.get_integrater_finish_done():
      while not self.get_integrater_done():
        while not self.get_integrater_prepare_done():

          Debug.write('Preparing to do some integration...')
          self.set_integrater_prepare_done(True)

          # if this raises an exception, perhaps the autoindexing
          # solution has too high symmetry. if this the case, then
          # perform a self._intgr_indexer.eliminate() - this should
          # reset the indexing system

          try:
            self._integrate_prepare()

          except BadLatticeError as e:

            Journal.banner('eliminated this lattice', size = 80)

            Chatter.write('Rejecting bad lattice %s' % str(e))
            self._intgr_refiner.eliminate()
            self._integrater_reset()

        # FIXME x1698 - may be the case that _integrate() returns the
        # raw intensities, _integrate_finish() returns intensities
        # which may have been adjusted or corrected. See #1698 below.

        Debug.write('Doing some integration...')

        self.set_integrater_done(True)

        template = self.get_integrater_sweep().get_template()

        if self._intgr_sweep_name:
          if PhilIndex.params.xia2.settings.show_template:
            Chatter.banner('Integrating %s (%s)' % \
                           (self._intgr_sweep_name, template))
          else:
            Chatter.banner('Integrating %s' % \
                           (self._intgr_sweep_name))
        try:

          #1698
          self._intgr_hklout_raw = self._integrate()

        except BadLatticeError as e:
          Chatter.write('Rejecting bad lattice %s' % str(e))

          Journal.banner('eliminated this lattice', size = 80)

          self._intgr_refiner.eliminate()
          self._integrater_reset()
Example #26
        def run(self):
            """Run dials.merge"""
            self.clear_command_line()

            assert self._experiments_filename
            assert self._reflections_filename
            self.add_command_line(self._reflections_filename)
            self.add_command_line(self._experiments_filename)

            self.add_command_line("truncate=%s" % self._truncate)

            if self._mtz_filename:
                self.add_command_line("output.mtz=%s" % self._mtz_filename)

            if self._project_name:
                self.add_command_line("output.project_name=%s" %
                                      self._project_name)
            if self._crystal_names:
                self.add_command_line("output.crystal_names=%s" %
                                      self._crystal_names)
            if self._dataset_names:
                self.add_command_line("output.dataset_names=%s" %
                                      self._dataset_names)
            if self._partiality_threshold:
                self.add_command_line("partiality_threshold=%s" %
                                      self._partiality_threshold)

            self.start()
            self.close_wait()

            # check for errors

            try:
                self.check_for_errors()
            except Exception:
                Chatter.write(
                    "dials.merge failed, see log file for more details:\n  %s"
                    % self.get_log_file())
                raise

            Debug.write("dials.merge status: OK")
Example #27
    def run_one_sweep(args):

      from xia2.Handlers.Streams import Debug

      assert len(args) == 3
      s, failover, job_type = args

      if job_type:
        DriverFactory.set_driver_type(job_type)

      Chatter.cache()
      Debug.cache()

      try:
        s.get_integrater_intensities()
      except Exception as e:
        if failover:
          Chatter.write('Processing sweep %s failed: %s' % \
                        (s.get_name(), str(e)))
          s = None
        else:
          raise
      finally:
        Chatter.uncache()
        Debug.uncache()
      return s
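Keeping the return outside the finally block matters here: a return inside finally silently discards any exception raised or re-raised in the try/except body, so the bare raise would never propagate. A minimal demonstration of that Python behaviour:

    def demo():
        try:
            raise ValueError('boom')
        finally:
            return 'swallowed'  # the ValueError never propagates

    assert demo() == 'swallowed'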
Example #28
def run():
    try:
        check_environment()
    except Exception as e:
        with open("xia2.error", "w") as fh:
            traceback.print_exc(file=fh)
        Chatter.write('Status: error "%s"' % str(e))
        sys.exit(1)

    if len(sys.argv) < 2 or "-help" in sys.argv:
        help()
        sys.exit()

    wd = os.getcwd()

    try:
        from xia2.command_line.xia2_main import xia2_main

        xia2_main(stop_after="integrate")
        Chatter.write("Status: normal termination")

    except Exception as e:
        with open(os.path.join(wd, "xia2.error"), "w") as fh:
            traceback.print_exc(file=fh)
        Chatter.write('Status: error "%s"' % str(e))
        sys.exit(1)
Example #29
File: html.py Project: xia2/xia2
def make_logfile_html(logfile):
    tables = extract_loggraph_tables(logfile)
    if not tables:
      return

    rst = []

    for table in tables:
      try:
        #for graph_name, html in table_to_google_charts(table).iteritems():
        for graph_name, html in table_to_c3js_charts(table).iteritems():
          #rst.append('.. __%s:\n' %graph_name)
          rst.append('.. raw:: html')
          rst.append('\n    '.join(html.split('\n')))
      except Exception as e:
        Chatter.write('=' * 80)
        Chatter.write('Error (%s) while processing table' % str(e))
        Chatter.write("  '%s'" % table.title)
        Chatter.write('in %s' % logfile)
        Chatter.write('=' * 80)
        Debug.write('Exception raised while processing log file %s, table %s' % (logfile, table.title))
        Debug.write(traceback.format_exc())
Example #30
    def run_one_sweep(args):

      from xia2.Handlers.Streams import Debug

      assert len(args) == 3
      s, failover, job_type = args

      if job_type:
        DriverFactory.set_driver_type(job_type)

      Chatter.cache()
      Debug.cache()

      try:
        s.get_integrater_intensities()
      except Exception as e:
        if failover:
          Chatter.write('Processing sweep %s failed: %s' % \
                        (s.get_name(), str(e)))
          s = None
        else:
          raise
Example #31
def write_citations():
    # tell the user which programs were used...
    Chatter.write("XIA2 used... %s" % ", ".join(Citations.get_programs()))
    Chatter.write(
        "Here are the appropriate citations (BIBTeX in xia2-citations.bib.)")

    for citation in Citations.get_citations_acta():
        Chatter.write(citation)

    # and write the bibtex versions
    out = open("xia2-citations.bib", "w")

    for citation in Citations.get_citations():
        out.write("%s\n" % citation)

    out.close()
Example #32
def write_citations():
    # tell the user which programs were used...
    Chatter.write('XIA2 used... %s' % \
        ', '.join(Citations.get_programs()))
    Chatter.write(
        'Here are the appropriate citations (BIBTeX in xia2-citations.bib.)')

    for citation in Citations.get_citations_acta():
        Chatter.write(citation)

    # and write the bibtex versions
    out = open('xia2-citations.bib', 'w')

    for citation in Citations.get_citations():
        out.write('%s\n' % citation)

    out.close()
Example #33
def run():
  try:
    check_environment()
  except Exception as e:
    traceback.print_exc(file=open('xia2.error', 'w'))
    Chatter.write('Status: error "%s"' % str(e))

  if len(sys.argv) < 2 or '-help' in sys.argv:
    help()
    sys.exit()

  wd = os.getcwd()

  try:
    from xia2.command_line.xia2_main import xia2_main
    xia2_main(stop_after='integrate')
    Chatter.write('Status: normal termination')

  except Exception as e:
    traceback.print_exc(file=open(os.path.join(wd, 'xia2.error'), 'w'))
    Chatter.write('Status: error "%s"' % str(e))
Example #34
File: XSweep.py Project: xia2/xia2
  def __init__(self, name,
               wavelength,
               sample,
               directory = None,
               image = None,
               beam = None,
               reversephi = False,
               distance = None,
               gain = 0.0,
               dmin = 0.0,
               dmax = 0.0,
               polarization = 0.0,
               frames_to_process = None,
               user_lattice = None,
               user_cell = None,
               epoch = 0,
               ice = False,
               excluded_regions = None):
    '''Create a new sweep named name, belonging to XWavelength object
    wavelength, representing the images in directory starting with image,
    with beam centre optionally defined.'''
    if excluded_regions is None:
      excluded_regions = []

    # + check the wavelength is an XWavelength object
    #   raise an exception if not... or not...

    if not wavelength.__class__.__name__ == 'XWavelength':
      pass

    # FIXME bug 2221 if DIRECTORY starts with ~/ or ~graeme (say) need to
    # interpret this properly - e.g. map it to a full PATH.

    directory = expand_path(directory)

    self._name = name
    self._wavelength = wavelength
    self._sample = sample
    self._directory = directory
    self._image = image
    self._reversephi = reversephi
    self._epoch = epoch
    self._user_lattice = user_lattice
    self._user_cell = user_cell
    self._header = { }
    self._resolution_high = dmin
    self._resolution_low = dmax
    self._ice = ice
    self._excluded_regions = excluded_regions
    self._imageset = None

    # FIXME in here also need to be able to accumulate the total
    # dose from all experimental measurements (complex) and provide
    # a _epoch_to_dose dictionary or some such... may be fiddly as
    # this will need to parse across multiple templates. c/f Bug # 2798

    self._epoch_to_image = { }
    self._image_to_epoch = { }

    # to allow first, last image for processing to be
    # set... c/f integrater interface
    self._frames_to_process = frames_to_process

    # + derive template, list of images

    params = PhilIndex.get_python_object()
    if directory and image:
      self._template, self._directory = \
                      image2template_directory(os.path.join(directory,
                                                            image))

      from xia2.Schema import load_imagesets
      imagesets = load_imagesets(
        self._template, self._directory, image_range=self._frames_to_process,
        reversephi=(params.xia2.settings.input.reverse_phi or self._reversephi))

      assert len(imagesets) == 1, "one imageset expected, %d found" % \
          len(imagesets)
      self._imageset = copy.deepcopy(imagesets[0])
      start, end = self._imageset.get_array_range()
      self._images = list(range(start+1, end+1))

      # FIXME in here check that (1) the list of images is continuous
      # and (2) that all of the images are readable. This should also
      # take into account frames_to_process if set.

      if self._frames_to_process is None:
        self._frames_to_process = min(self._images), max(self._images)

      start, end = self._frames_to_process

      error = False

      if params.general.check_image_files_readable:
        for j in range(start, end + 1):
          image_name = self.get_imageset().get_path(j-start)
          if j not in self._images:
            Debug.write('image %s missing' %image_name)
            error = True
            continue
          if not os.access(image_name, os.R_OK):
            Debug.write('image %s unreadable' %image_name)
            error = True
            continue

        if error:
          raise RuntimeError('problem with sweep %s' % self._name)

      # + read the image header information into here?
      #   or don't I need it? it would be useful for checking
      #   against wavelength.getWavelength() I guess to make
      #   sure that the plumbing is all sound.

      # check that they match by closer than 0.0001A, if wavelength
      # is not None

      beam_ = self._imageset.get_beam()
      scan = self._imageset.get_scan()
      if wavelength is not None:

        # FIXME 29/NOV/06 if the wavelength wavelength value
        # is 0.0 then first set it to the header value - note
        # that this assumes that the header value is correct
        # (a reasonable assumption)

        if wavelength.get_wavelength() == 0.0:
          wavelength.set_wavelength(beam_.get_wavelength())

        # FIXME 08/DEC/06 in here need to allow for the fact
        # that the wavelength in the image header could be wrong and
        # in fact it should be replaced with the input value -
        # through the user will need to be warned of this and
        # also everything using the FrameProcessor interface
        # will also have to respect this!

        if math.fabs(beam_.get_wavelength() -
                     wavelength.get_wavelength()) > 0.0001:
          # format = 'wavelength for sweep %s does not ' + \
          # 'match wavelength %s'
          # raise RuntimeError, format  % \
          # (name, wavelength.get_name())

          format = 'Header wavelength for sweep %s different' + \
                   ' to assigned value (%4.2f vs. %4.2f)'

          Chatter.write(format % (name, beam_.get_wavelength(),
                                  wavelength.get_wavelength()))


      # also in here look at the image headers to see if we can
      # construct a mapping between exposure epoch and image ...

      images = []

      if self._frames_to_process:
        start, end = self._frames_to_process
        for j in self._images:
          if j >= start and j <= end:
            images.append(j)
      else:
        images = self._images

      for j in images:
        epoch = scan.get_image_epoch(j)
        if epoch == 0.0:
          epoch = float(os.stat(self._imageset.get_path(j-images[0])).st_mtime)
        self._epoch_to_image[epoch] = j
        self._image_to_epoch[j] = epoch

      epochs = self._epoch_to_image.keys()

      Debug.write('Exposure epoch for sweep %s: %d %d' % \
                  (self._template, min(epochs), max(epochs)))

    self._input_imageset = copy.deepcopy(self._imageset)

    # + get the lattice - can this be a pointer, so that when
    #   this object updates lattice it is globally-for-this-crystal
    #   updated? The lattice included directly in here includes an
    #   exact unit cell for data reduction, the crystal lattice
    #   contains an approximate unit cell which should be
    #   from the unit cells from all sweeps contained in the
    #   XCrystal. FIXME should I be using a LatticeInfo object
    #   in here? See what the Indexer interface produces. ALT:
    #   just provide an Indexer implementation "hook".
    #   See Headnote 001 above. See also _get_indexer,
    #   _get_integrater below.

    self._indexer = None
    self._refiner = None
    self._integrater = None

    # I don't need this - it is equivalent to self.getWavelength(
    # ).getCrystal().getLattice()
    # self._crystal_lattice = None

    # this means that this module will have to present largely the
    # same interface as Indexer and Integrater so that the calls
    # can be appropriately forwarded.

    # finally configure the beam if set

    if beam is not None:
      from dxtbx.model.detector_helpers import set_mosflm_beam_centre
      try:
        set_mosflm_beam_centre(self.get_imageset().get_detector(),
                               self.get_imageset().get_beam(),
                               beam)
      except AssertionError as e:
        Debug.write('Error setting mosflm beam centre: %s' % e)
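The wavelength consistency check in the middle of this constructor reduces to a simple tolerance comparison: the header value is adopted when the assigned wavelength is 0.0, and a warning is printed when the two disagree by more than 0.0001 Å. As a standalone sketch:

    import math

    def wavelength_matches(header_wl, assigned_wl, tol=0.0001):
        # Header and assigned wavelengths should agree to within ~0.0001 A
        return math.fabs(header_wl - assigned_wl) <= tol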
Example #35
        def run(self):
            '''Run labelit.index'''

            assert len(self._images) > 0
            self._images.sort()

            if self._max_cell is None and len(self._images) > 4:
                raise RuntimeError('cannot use more than 4 images')

            #task = 'Autoindex from images:'

            #for i in _images:
            #task += ' %s' % self.get_image_name(i)

            #self.set_task(task)

            self.add_command_line('--index_only')

            for image in self._images:
                self.add_command_line(image)

            if self._space_group_number is not None:
                self.add_command_line('known_symmetry=%d' %
                                      self._space_group_number)

            if self._primitive_unit_cell is not None:
                self.add_command_line('target_cell=%f,%f,%f,%f,%f,%f' %
                                      tuple(self._primitive_unit_cell))

            if self._max_cell is not None:
                self.add_command_line('codecamp.maxcell=%f' % self._max_cell)

            self._write_dataset_preferences(len(self._images))

            shutil.copyfile(os.path.join(self.get_working_directory(),
                                         'dataset_preferences.py'),
                            os.path.join(self.get_working_directory(),
                                         '%d_dataset_preferences.py' % \
                                         self.get_xpid()))

            self.start()
            self.close_wait()

            #sweep = self.get_indexer_sweep_name()
            #FileHandler.record_log_file(
            #'%s INDEX' % (sweep), self.get_log_file())

            # check for errors
            self.check_for_errors()

            # check for labelit errors - if something went wrong, then
            # try to address it by e.g. extending the beam search area...

            self.check_labelit_errors()

            # ok now we're done, let's look through for some useful stuff
            output = self.get_all_output()

            counter = 0

            # FIXME 03/NOV/06 something to do with the new centre search...

            # example output:

            # Beam center is not immediately clear; rigorously retesting \
            #                                             2 solutions
            # Beam x 109.0 y 105.1, initial score 538; refined rmsd: 0.1969
            # Beam x 108.8 y 106.1, initial score 354; refined rmsd: 0.1792

            # in here want to parse the beam centre search if it was done,
            # and check that the highest scoring solution was declared
            # the "best" - though should also have a check on the
            # R.M.S. deviation of that solution...

            # do this first!

            for j in range(len(output)):
                o = output[j]
                if 'Beam centre is not immediately clear' in o:
                    # read the solutions that it has found and parse the
                    # information

                    centres = []
                    scores = []
                    rmsds = []

                    num_solutions = int(o.split()[-2])

                    for n in range(num_solutions):
                        record = output[j + n + 1].replace(',', ' ').replace(
                            ';', ' ').split()
                        x, y = float(record[2]), \
                               float(record[4])

                        centres.append((x, y))
                        scores.append(int(record[7]))
                        rmsds.append(float(record[-1]))

                    # next perform some analysis and perhaps assert the
                    # correct solution - for the moment just raise a warning
                    # if it looks like wrong solution may have been picked

                    best_beam_score = (0.0, 0.0, 0)
                    best_beam_rmsd = (0.0, 0.0, 1.0e8)

                    for n in range(num_solutions):
                        beam = centres[n]
                        score = scores[n]
                        rmsd = rmsds[n]

                        if score > best_beam_score[2]:
                            best_beam_score = (beam[0], beam[1], score)

                        if rmsd < best_beam_rmsd[2]:
                            best_beam_rmsd = (beam[0], beam[1], rmsd)

                    # allow a difference of 0.1mm in either direction...
                    if math.fabs(
                        best_beam_score[0] -
                        best_beam_rmsd[0]) > 0.1 or \
                        math.fabs(best_beam_score[1] -
                                  best_beam_rmsd[1]) > 0.1:
                        Chatter.write(
                            'Labelit may have picked the wrong beam centre')

                        # FIXME as soon as I get the indexing loop
                        # structure set up, this should reset the
                        # indexing done flag, set the search range to
                        # 0, correct beam and then return...

                        # should also allow for the possibility that
                        # labelit has selected the best solution - so this
                        # will need to remember the stats for this solution,
                        # then compare them against the stats (one day) from
                        # running with the other solution - eventually the
                        # correct solution will result...

            for o in output:
                l = o.split()

                if l[:3] == ['Beam', 'center', 'x']:
                    x = float(l[3].replace('mm,', ''))
                    y = float(l[5].replace('mm,', ''))

                    self._mosflm_beam_centre = (x, y)
                    self._mosflm_detector_distance = float(l[7].replace(
                        'mm', ''))
                    #self.set_indexer_beam_centre((x, y))
                    #self.set_indexer_distance(float(l[7].replace('mm', '')))

                    self._mosaic = float(l[10].replace('mosaicity=', ''))

                if l[:3] == ['Solution', 'Metric', 'fit']:
                    break

                counter += 1

            # if we've just broken out (counter < len(output)) then
            # we need to gather the output

            if counter >= len(output):
                raise RuntimeError('error in indexing')

            # FIXME this needs to check the smilie status e.g.
            # ":)" or ";(" or "  ".

            for i in range(counter + 1, len(output)):
                o = output[i][3:]
                smiley = output[i][:3]
                l = o.split()
                if l:

                    self._solutions[int(l[0])] = {
                        'number': int(l[0]),
                        'mosaic': self._mosaic,
                        'metric': float(l[1]),
                        'rmsd': float(l[3]),
                        'nspots': int(l[4]),
                        'lattice': l[6],
                        'cell': list(map(float, l[7:13])),
                        'volume': int(l[-1]),
                        'smiley': smiley
                    }

            # remove clearly incorrect solutions i.e. rmsd >> rmsd for P1 i.e. factor
            # of 4.0 or more...

            for s in sorted(self._solutions):
                if self._solutions[s][
                        'rmsd'] > 4.0 * self._solutions[1]['rmsd']:
                    del self._solutions[s]

            return 'ok'
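The beam-centre sanity check above compares the solution with the highest score against the one with the lowest rmsd, and warns when they disagree by more than 0.1 mm in either direction. The same logic in compact form:

    def beam_centre_suspect(centres, scores, rmsds, tol=0.1):
        # centres: list of (x, y) in mm, parallel to scores and rmsds
        by_score = centres[max(range(len(scores)), key=scores.__getitem__)]
        by_rmsd = centres[min(range(len(rmsds)), key=rmsds.__getitem__)]
        return (abs(by_score[0] - by_rmsd[0]) > tol
                or abs(by_score[1] - by_rmsd[1]) > tol)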
Example #36
 def print_command_line(self):
     cl = self.get_command_line()
     Chatter.write("Command line: %s" % cl)
Example #37
  def get_output(self):
    result = 'Wavelength name: %s\n' % self._name
    result += 'Wavelength %7.5f\n' % self._wavelength
    if self._f_pr != 0.0 and self._f_prpr != 0.0:
      result += 'F\', F\'\' = (%5.2f, %5.2f)\n' % (self._f_pr,
                                                   self._f_prpr)

    result += 'Sweeps:\n'

    from xia2.Driver.DriverFactory import DriverFactory

    def run_one_sweep(args):

      from xia2.Handlers.Streams import Debug

      assert len(args) == 3
      s, failover, job_type = args

      if job_type:
        DriverFactory.set_driver_type(job_type)

      Chatter.cache()
      Debug.cache()

      try:
        s.get_integrater_intensities()
      except Exception as e:
        if failover:
          Chatter.write('Processing sweep %s failed: %s' % \
                        (s.get_name(), str(e)))
          s = None
        else:
          raise
      finally:
        Chatter.uncache()
        Debug.uncache()
      return s

    remove = []

    from xia2.Handlers.Phil import PhilIndex
    params = PhilIndex.get_python_object()
    failover = params.xia2.settings.failover

    for s in self._sweeps:

      # would be nice to put this somewhere else in the hierarchy - not
      # sure how to do that though (should be handled in Interfaces?)

      try:
        result += '%s\n' % s.get_output()
      except Exception as e:
        if failover:
          Chatter.write('Processing sweep %s failed: %s' % \
                        (s.get_name(), str(e)))
          remove.append(s)
        else:
          raise

    for s in remove:
      self._sweeps.remove(s)

    return result[:-1]
Example #38
        def run(self):
            """Run labelit.index"""

            assert len(self._images) > 0
            self._images.sort()

            if self._max_cell is None and len(self._images) > 4:
                raise RuntimeError("cannot use more than 4 images")

            # task = 'Autoindex from images:'

            # for i in _images:
            # task += ' %s' % self.get_image_name(i)

            # self.set_task(task)

            self.add_command_line("--index_only")

            for image in self._images:
                self.add_command_line(image)

            if self._space_group_number is not None:
                self.add_command_line("known_symmetry=%d" % self._space_group_number)

            if self._primitive_unit_cell is not None:
                self.add_command_line("target_cell=%f,%f,%f,%f,%f,%f" % tuple(self._primitive_unit_cell))

            if self._max_cell is not None:
                self.add_command_line("codecamp.maxcell=%f" % self._max_cell)

            self._write_dataset_preferences(len(self._images))

            shutil.copyfile(
                os.path.join(self.get_working_directory(), "dataset_preferences.py"),
                os.path.join(self.get_working_directory(), "%d_dataset_preferences.py" % self.get_xpid()),
            )

            self.start()
            self.close_wait()

            # sweep = self.get_indexer_sweep_name()
            # FileHandler.record_log_file(
            #'%s INDEX' % (sweep), self.get_log_file())

            # check for errors
            self.check_for_errors()

            # check for labelit errors - if something went wrong, then
            # try to address it by e.g. extending the beam search area...

            self.check_labelit_errors()

            # ok now we're done, let's look through for some useful stuff
            output = self.get_all_output()

            counter = 0

            # FIXME 03/NOV/06 something to do with the new centre search...

            # example output:

            # Beam center is not immediately clear; rigorously retesting \
            #                                             2 solutions
            # Beam x 109.0 y 105.1, initial score 538; refined rmsd: 0.1969
            # Beam x 108.8 y 106.1, initial score 354; refined rmsd: 0.1792

            # in here want to parse the beam centre search if it was done,
            # and check that the highest scoring solution was declared
            # the "best" - though should also have a check on the
            # R.M.S. deviation of that solution...

            # do this first!

            for j in range(len(output)):
                o = output[j]
                if "Beam centre is not immediately clear" in o:
                    # read the solutions that it has found and parse the
                    # information

                    centres = []
                    scores = []
                    rmsds = []

                    num_solutions = int(o.split()[-2])

                    for n in range(num_solutions):
                        record = output[j + n + 1].replace(",", " ").replace(";", " ").split()
                        x, y = float(record[2]), float(record[4])

                        centres.append((x, y))
                        scores.append(int(record[7]))
                        rmsds.append(float(record[-1]))

                    # next perform some analysis and perhaps assert the
                    # correct solution - for the moment just raise a warning
                    # if it looks like wrong solution may have been picked

                    best_beam_score = (0.0, 0.0, 0)
                    best_beam_rmsd = (0.0, 0.0, 1.0e8)

                    for n in range(num_solutions):
                        beam = centres[n]
                        score = scores[n]
                        rmsd = rmsds[n]

                        if score > best_beam_score[2]:
                            best_beam_score = (beam[0], beam[1], score)

                        if rmsd < best_beam_rmsd[2]:
                            best_beam_rmsd = (beam[0], beam[1], rmsd)

                    # allow a difference of 0.1mm in either direction...
                    if (
                        math.fabs(best_beam_score[0] - best_beam_rmsd[0]) > 0.1
                        or math.fabs(best_beam_score[1] - best_beam_rmsd[1]) > 0.1
                    ):
                        Chatter.write("Labelit may have picked the wrong beam centre")

                        # FIXME as soon as I get the indexing loop
                        # structure set up, this should reset the
                        # indexing done flag, set the search range to
                        # 0, correct beam and then return...

                        # should also allow for the possibility that
                        # labelit has selected the best solution - so this
                        # will need to remember the stats for this solution,
                        # then compare them against the stats (one day) from
                        # running with the other solution - eventually the
                        # correct solution will result...

            for o in output:
                l = o.split()

                if l[:3] == ["Beam", "center", "x"]:
                    x = float(l[3].replace("mm,", ""))
                    y = float(l[5].replace("mm,", ""))

                    self._mosflm_beam_centre = (x, y)
                    self._mosflm_detector_distance = float(l[7].replace("mm", ""))
                    # self.set_indexer_beam_centre((x, y))
                    # self.set_indexer_distance(float(l[7].replace('mm', '')))

                    self._mosaic = float(l[10].replace("mosaicity=", ""))

                if l[:3] == ["Solution", "Metric", "fit"]:
                    break

                counter += 1

            # if we've just broken out (counter < len(output)) then
            # we need to gather the output

            if counter >= len(output):
                raise RuntimeError("error in indexing")

            # FIXME this needs to check the smilie status e.g.
            # ":)" or ";(" or "  ".

            for i in range(counter + 1, len(output)):
                o = output[i][3:]
                smiley = output[i][:3]
                l = o.split()
                if l:

                    self._solutions[int(l[0])] = {
                        "number": int(l[0]),
                        "mosaic": self._mosaic,
                        "metric": float(l[1]),
                        "rmsd": float(l[3]),
                        "nspots": int(l[4]),
                        "lattice": l[6],
                        "cell": list(map(float, l[7:13])),
                        "volume": int(l[-1]),
                        "smiley": smiley,
                    }

            # remove clearly incorrect solutions i.e. rmsd >> rmsd for P1 i.e. factor
            # of 4.0 or more...

            for s in sorted(self._solutions):
                if self._solutions[s]["rmsd"] > 4.0 * self._solutions[1]["rmsd"]:
                    del self._solutions[s]

            return "ok"
Example #39
def xia2_main(stop_after=None):
    '''Actually process something...'''
    Citations.cite('xia2')

    # print versions of related software
    Chatter.write(dials_version())

    ccp4_version = get_ccp4_version()
    if ccp4_version is not None:
        Chatter.write('CCP4 %s' % ccp4_version)

    start_time = time.time()

    CommandLine = get_command_line()
    start_dir = Flags.get_starting_directory()

    # check that something useful has been assigned for processing...
    xtals = CommandLine.get_xinfo().get_crystals()

    no_images = True

    for name in xtals.keys():
        xtal = xtals[name]

        if not xtal.get_all_image_names():

            Chatter.write('-----------------------------------' + \
                          '-' * len(name))
            Chatter.write('| No images assigned for crystal %s |' % name)
            Chatter.write('-----------------------------------' + '-' \
                          * len(name))
        else:
            no_images = False

    args = []

    from xia2.Handlers.Phil import PhilIndex
    params = PhilIndex.get_python_object()
    mp_params = params.xia2.settings.multiprocessing
    njob = mp_params.njob

    from libtbx import group_args

    xinfo = CommandLine.get_xinfo()

    if os.path.exists('xia2.json'):
        from xia2.Schema.XProject import XProject
        xinfo_new = xinfo
        xinfo = XProject.from_json(filename='xia2.json')

        crystals = xinfo.get_crystals()
        crystals_new = xinfo_new.get_crystals()
        for crystal_id in crystals_new.keys():
            if crystal_id not in crystals:
                crystals[crystal_id] = crystals_new[crystal_id]
                continue
            crystals[crystal_id]._scaler = None  # reset scaler
            for wavelength_id in crystals_new[crystal_id].get_wavelength_names():
                wavelength_new = crystals_new[crystal_id].get_xwavelength(
                    wavelength_id)
                if wavelength_id not in crystals[crystal_id].get_wavelength_names():
                    crystals[crystal_id].add_wavelength(wavelength_new)
                    continue
                wavelength = crystals[crystal_id].get_xwavelength(
                    wavelength_id)
                sweeps_new = wavelength_new.get_sweeps()
                sweeps = wavelength.get_sweeps()
                sweep_names = [s.get_name() for s in sweeps]
                sweep_keys = [(s.get_directory(), s.get_template(),
                               s.get_image_range()) for s in sweeps]
                for sweep in sweeps_new:
                    if ((sweep.get_directory(), sweep.get_template(),
                         sweep.get_image_range()) not in sweep_keys):
                        if sweep.get_name() in sweep_names:
                            i = 1
                            while 'SWEEP%i' % i in sweep_names:
                                i += 1
                            sweep._name = 'SWEEP%i' % i
                            break
                        wavelength.add_sweep(
                            name=sweep.get_name(),
                            sample=sweep.get_xsample(),
                            directory=sweep.get_directory(),
                            image=sweep.get_image(),
                            beam=sweep.get_beam_centre(),
                            reversephi=sweep.get_reversephi(),
                            distance=sweep.get_distance(),
                            gain=sweep.get_gain(),
                            dmin=sweep.get_resolution_high(),
                            dmax=sweep.get_resolution_low(),
                            polarization=sweep.get_polarization(),
                            frames_to_process=sweep.get_frames_to_process(),
                            user_lattice=sweep.get_user_lattice(),
                            user_cell=sweep.get_user_cell(),
                            epoch=sweep._epoch,
                            ice=sweep._ice,
                            excluded_regions=sweep._excluded_regions,
                        )
                        sweep_names.append(sweep.get_name())

    crystals = xinfo.get_crystals()

    failover = params.xia2.settings.failover

    if mp_params.mode == 'parallel' and njob > 1:
        driver_type = mp_params.type
        command_line_args = CommandLine.get_argv()[1:]
        for crystal_id in crystals.keys():
            for wavelength_id in crystals[crystal_id].get_wavelength_names():
                wavelength = crystals[crystal_id].get_xwavelength(
                    wavelength_id)
                sweeps = wavelength.get_sweeps()
                for sweep in sweeps:
                    sweep._get_indexer()
                    sweep._get_refiner()
                    sweep._get_integrater()
                    args.append((group_args(
                        driver_type=driver_type,
                        stop_after=stop_after,
                        failover=failover,
                        command_line_args=command_line_args,
                        nproc=mp_params.nproc,
                        crystal_id=crystal_id,
                        wavelength_id=wavelength_id,
                        sweep_id=sweep.get_name(),
                    ), ))

        from xia2.Driver.DriverFactory import DriverFactory
        default_driver_type = DriverFactory.get_driver_type()

        # run every nth job on the current computer (no need to submit to qsub)
        for i_job, arg in enumerate(args):
            if (i_job % njob) == 0:
                arg[0].driver_type = default_driver_type

        if mp_params.type == "qsub":
            method = "sge"
        else:
            method = "multiprocessing"
        nproc = mp_params.nproc
        qsub_command = mp_params.qsub_command
        if not qsub_command:
            qsub_command = 'qsub'
        qsub_command = '%s -V -cwd -pe smp %d' % (qsub_command, nproc)

        from libtbx import easy_mp
        results = easy_mp.parallel_map(
            process_one_sweep,
            args,
            processes=njob,
            # note: the method computed above is ignored here - jobs are
            # always dispatched via multiprocessing, not submitted to SGE
            method="multiprocessing",
            qsub_command=qsub_command,
            preserve_order=True,
            preserve_exception_message=True)

        # Hack to update sweep with the serialized indexers/refiners/integraters
        i_sweep = 0
        for crystal_id in crystals.keys():
            for wavelength_id in crystals[crystal_id].get_wavelength_names():
                wavelength = crystals[crystal_id].get_xwavelength(
                    wavelength_id)
                remove_sweeps = []
                sweeps = wavelength.get_sweeps()
                for sweep in sweeps:
                    success, output, xsweep_dict = results[i_sweep]
                    if output is not None:
                        Chatter.write(output)
                    if not success:
                        Chatter.write('Sweep failed: removing %s' %
                                      sweep.get_name())
                        remove_sweeps.append(sweep)
                    else:
                        assert xsweep_dict is not None
                        Chatter.write('Loading sweep: %s' % sweep.get_name())
                        from xia2.Schema.XSweep import XSweep
                        new_sweep = XSweep.from_dict(xsweep_dict)
                        sweep._indexer = new_sweep._indexer
                        sweep._refiner = new_sweep._refiner
                        sweep._integrater = new_sweep._integrater
                    i_sweep += 1
                for sweep in remove_sweeps:
                    wavelength.remove_sweep(sweep)
                    sample = sweep.get_xsample()
                    sample.remove_sweep(sweep)

    else:
        for crystal_id in crystals.keys():
            for wavelength_id in crystals[crystal_id].get_wavelength_names():
                wavelength = crystals[crystal_id].get_xwavelength(
                    wavelength_id)
                remove_sweeps = []
                sweeps = wavelength.get_sweeps()
                for sweep in sweeps:
                    from dials.command_line.show import show_datablocks
                    from dxtbx.datablock import DataBlock
                    Debug.write(sweep.get_name())
                    Debug.write(
                        show_datablocks([DataBlock([sweep.get_imageset()])]))
                    try:
                        if stop_after == 'index':
                            sweep.get_indexer_cell()
                        else:
                            sweep.get_integrater_intensities()
                        sweep.serialize()
                    except Exception as e:
                        if failover:
                            Chatter.write('Processing sweep %s failed: %s' % \
                                          (sweep.get_name(), str(e)))
                            remove_sweeps.append(sweep)
                        else:
                            raise
                for sweep in remove_sweeps:
                    wavelength.remove_sweep(sweep)
                    sample = sweep.get_xsample()
                    sample.remove_sweep(sweep)

    # save intermediate xia2.json file in case scaling step fails
    xinfo.as_json(filename='xia2.json')

    if stop_after not in ('index', 'integrate'):
        Chatter.write(xinfo.get_output(), strip=False)

    for crystal in crystals.values():
        crystal.serialize()

    # save final xia2.json file in case report generation fails
    xinfo.as_json(filename='xia2.json')

    duration = time.time() - start_time

    # write out the time taken in a human readable way
    Chatter.write('Processing took %s' % \
                  time.strftime("%Hh %Mm %Ss", time.gmtime(duration)))

    if stop_after not in ('index', 'integrate'):
        # and the summary file
        with open('xia2-summary.dat', 'w') as fh:
            for record in xinfo.summarise():
                fh.write('%s\n' % record)

        # looks like this import overwrites the initial command line
        # Phil overrides so... for https://github.com/xia2/xia2/issues/150
        from xia2.command_line.html import generate_xia2_html

        if params.xia2.settings.small_molecule is True:
            params.xia2.settings.report.xtriage_analysis = False
            params.xia2.settings.report.include_radiation_damage = False

        generate_xia2_html(xinfo,
                           filename='xia2.html',
                           params=params.xia2.settings.report)

    write_citations()

    # delete all of the temporary mtz files...
    cleanup()
    Environment.cleanup()
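
(A minimal sketch of the fan-out pattern used above - pack per-sweep
arguments, map them over a pool of worker processes, then rehydrate each
sweep from the serialized state the worker returns. This sketch uses only
the standard library; process_one() is a hypothetical stand-in for
process_one_sweep.)

from multiprocessing import Pool

def process_one(arg):
    # mirror process_one_sweep's (success, output, state) return shape
    name, nproc = arg
    try:
        state = {'name': name, 'nproc': nproc}  # stand-in for real work
        return True, 'processed %s' % name, state
    except Exception as e:
        return False, str(e), None

def fan_out(sweep_names, njob=4, nproc=1):
    args = [(name, nproc) for name in sweep_names]
    pool = Pool(processes=njob)
    try:
        # preserve order so results line up with the input sweep list
        results = pool.map(process_one, args)
    finally:
        pool.close()
        pool.join()
    # keep only the sweeps that processed successfully, in input order
    return [state for ok, _, state in results if ok]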
Example #40
0
  def _scale_prepare(self):
    '''Perform all of the preparation required to deliver the scaled
    data. This should sort together the reflection files, ensure that
    they are correctly indexed (via pointless) and generally tidy
    things up.'''

    # acknowledge all of the programs we are about to use...

    Citations.cite('pointless')
    Citations.cite('aimless')
    Citations.cite('ccp4')

    # ---------- GATHER ----------

    self._sweep_handler = SweepInformationHandler(self._scalr_integraters)

    Journal.block(
        'gathering', self.get_scaler_xcrystal().get_name(), 'CCP4',
        {'working directory':self.get_working_directory()})

    for epoch in self._sweep_handler.get_epochs():
      si = self._sweep_handler.get_sweep_information(epoch)
      pname, xname, dname = si.get_project_info()
      sname = si.get_sweep_name()

      exclude_sweep = False

      for sweep in PhilIndex.params.xia2.settings.sweep:
        if sweep.id == sname and sweep.exclude:
          exclude_sweep = True
          break

      if exclude_sweep:
        self._sweep_handler.remove_epoch(epoch)
        Debug.write('Excluding sweep %s' %sname)
      else:
        Journal.entry({'adding data from':'%s/%s/%s' % \
                       (xname, dname, sname)})

    # gather data for all images which belonged to the parent
    # crystal - allowing for the fact that things could go wrong
    # e.g. epoch information not available, exposure times not in
    # headers etc...

    for e in self._sweep_handler.get_epochs():
      si = self._sweep_handler.get_sweep_information(e)
      assert is_mtz_file(si.get_reflections())

    p, x = self._sweep_handler.get_project_info()
    self._scalr_pname = p
    self._scalr_xname = x

    # verify that the lattices are consistent, calling eliminate if
    # they are not. N.B. there could be corner cases here.

    need_to_return = False

    multi_sweep_indexing = \
      PhilIndex.params.xia2.settings.developmental.multi_sweep_indexing


    if len(self._sweep_handler.get_epochs()) > 1:

      if multi_sweep_indexing and not self._scalr_input_pointgroup:
        pointless_hklins = []

        max_batches = 0
        for epoch in self._sweep_handler.get_epochs():
          si = self._sweep_handler.get_sweep_information(epoch)
          hklin = si.get_reflections()

          md = self._factory.Mtzdump()
          md.set_hklin(hklin)
          md.dump()

          batches = md.get_batches()
          if 1 + max(batches) - min(batches) > max_batches:
            max_batches = max(batches) - min(batches) + 1

          datasets = md.get_datasets()

          Debug.write('In reflection file %s found:' % hklin)
          for d in datasets:
            Debug.write('... %s' % d)

          dataset_info = md.get_dataset_info(datasets[0])

        from xia2.lib.bits import nifty_power_of_ten
        Debug.write('Biggest sweep has %d batches' % max_batches)
        max_batches = nifty_power_of_ten(max_batches)

        counter = 0

        for epoch in self._sweep_handler.get_epochs():
          si = self._sweep_handler.get_sweep_information(epoch)
          hklin = si.get_reflections()
          integrater = si.get_integrater()
          refiner = integrater.get_integrater_refiner()

          hklin = self._prepare_pointless_hklin(
            hklin, si.get_integrater().get_phi_width())

          rb = self._factory.Rebatch()

          hklout = os.path.join(self.get_working_directory(),
                                '%s_%s_%s_%s_prepointless.mtz' % \
                                (pname, xname, dname, si.get_sweep_name()))

          # we will want to delete this on exit
          FileHandler.record_temporary_file(hklout)

          first_batch = min(si.get_batches())
          si.set_batch_offset(counter * max_batches - first_batch + 1)
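          # the offset above shifts this sweep's batch numbers into its
          # own block of max_batches (rounded up to a power of ten
          # earlier), so batches from different sweeps cannot collide
          # when the files are sorted together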

          rb.set_hklin(hklin)
          rb.set_first_batch(counter * max_batches + 1)
          rb.set_project_info(pname, xname, dname)
          rb.set_hklout(hklout)

          new_batches = rb.rebatch()

          pointless_hklins.append(hklout)

          # update the counter & recycle
          counter += 1

        s = self._factory.Sortmtz()

        pointless_hklin = os.path.join(self.get_working_directory(),
                              '%s_%s_prepointless_sorted.mtz' % \
                              (self._scalr_pname, self._scalr_xname))

        s.set_hklout(pointless_hklin)

        for hklin in pointless_hklins:
          s.add_hklin(hklin)

        s.sort()

        pointgroup, reindex_op, ntr, pt = self._pointless_indexer_jiffy(
            pointless_hklin, refiner)

        Debug.write('X1698: %s: %s' % (pointgroup, reindex_op))

        lattices = [Syminfo.get_lattice(pointgroup)]

        for epoch in self._sweep_handler.get_epochs():
          si = self._sweep_handler.get_sweep_information(epoch)
          intgr = si.get_integrater()
          hklin = si.get_reflections()
          refiner = intgr.get_integrater_refiner()

          if ntr:
            intgr.integrater_reset_reindex_operator()
            need_to_return = True

      else:
        lattices = []

        for epoch in self._sweep_handler.get_epochs():

          si = self._sweep_handler.get_sweep_information(epoch)
          intgr = si.get_integrater()
          hklin = si.get_reflections()
          refiner = intgr.get_integrater_refiner()

          if self._scalr_input_pointgroup:
            pointgroup = self._scalr_input_pointgroup
            reindex_op = 'h,k,l'
            ntr = False

          else:
            pointless_hklin = self._prepare_pointless_hklin(
              hklin, si.get_integrater().get_phi_width())

            pointgroup, reindex_op, ntr, pt = self._pointless_indexer_jiffy(
                pointless_hklin, refiner)

            Debug.write('X1698: %s: %s' % (pointgroup, reindex_op))

          lattice = Syminfo.get_lattice(pointgroup)

          if not lattice in lattices:
            lattices.append(lattice)

          if ntr:

            intgr.integrater_reset_reindex_operator()
            need_to_return = True

      if len(lattices) > 1:

        # why not using pointless indexer jiffy??!

        correct_lattice = sort_lattices(lattices)[0]

        Chatter.write('Correct lattice asserted to be %s' % \
                      correct_lattice)

        # transfer this information back to the indexers
        for epoch in self._sweep_handler.get_epochs():

          si = self._sweep_handler.get_sweep_information(epoch)
          refiner = si.get_integrater().get_integrater_refiner()
          sname = si.get_sweep_name()

          state = refiner.set_refiner_asserted_lattice(
              correct_lattice)

          if state == refiner.LATTICE_CORRECT:
            Chatter.write('Lattice %s ok for sweep %s' % \
                          (correct_lattice, sname))
          elif state == refiner.LATTICE_IMPOSSIBLE:
            raise RuntimeError('Lattice %s impossible for %s' %
                               (correct_lattice, sname))
          elif state == refiner.LATTICE_POSSIBLE:
            Chatter.write('Lattice %s assigned for sweep %s' % \
                          (correct_lattice, sname))
            need_to_return = True

    # if one or more of them was not in the lowest lattice,
    # need to return here to allow reprocessing

    if need_to_return:
      self.set_scaler_done(False)
      self.set_scaler_prepare_done(False)
      return

    # ---------- REINDEX ALL DATA TO CORRECT POINTGROUP ----------

    # all should share the same pointgroup, unless twinned... in which
    # case force them to be...

    pointgroups = { }
    reindex_ops = { }
    probably_twinned = False

    need_to_return = False

    multi_sweep_indexing = \
      PhilIndex.params.xia2.settings.developmental.multi_sweep_indexing

    if multi_sweep_indexing and not self._scalr_input_pointgroup:
      pointless_hklins = []

      max_batches = 0
      for epoch in self._sweep_handler.get_epochs():
        si = self._sweep_handler.get_sweep_information(epoch)
        hklin = si.get_reflections()

        md = self._factory.Mtzdump()
        md.set_hklin(hklin)
        md.dump()

        batches = md.get_batches()
        if 1 + max(batches) - min(batches) > max_batches:
          max_batches = max(batches) - min(batches) + 1

        datasets = md.get_datasets()

        Debug.write('In reflection file %s found:' % hklin)
        for d in datasets:
          Debug.write('... %s' % d)

        dataset_info = md.get_dataset_info(datasets[0])

      from xia2.lib.bits import nifty_power_of_ten
      Debug.write('Biggest sweep has %d batches' % max_batches)
      max_batches = nifty_power_of_ten(max_batches)

      counter = 0

      for epoch in self._sweep_handler.get_epochs():
        si = self._sweep_handler.get_sweep_information(epoch)
        hklin = si.get_reflections()
        integrater = si.get_integrater()
        refiner = integrater.get_integrater_refiner()

        hklin = self._prepare_pointless_hklin(
            hklin, si.get_integrater().get_phi_width())

        rb = self._factory.Rebatch()

        hklout = os.path.join(self.get_working_directory(),
                              '%s_%s_%s_%s_prepointless.mtz' % \
                              (pname, xname, dname, si.get_sweep_name()))

        # we will want to delete this on exit
        FileHandler.record_temporary_file(hklout)

        first_batch = min(si.get_batches())
        si.set_batch_offset(counter * max_batches - first_batch + 1)

        rb.set_hklin(hklin)
        rb.set_first_batch(counter * max_batches + 1)
        rb.set_project_info(pname, xname, dname)
        rb.set_hklout(hklout)

        new_batches = rb.rebatch()

        pointless_hklins.append(hklout)

        # update the counter & recycle
        counter += 1

      s = self._factory.Sortmtz()

      pointless_hklin = os.path.join(self.get_working_directory(),
                            '%s_%s_prepointless_sorted.mtz' % \
                            (self._scalr_pname, self._scalr_xname))

      s.set_hklout(pointless_hklin)

      for hklin in pointless_hklins:
        s.add_hklin(hklin)

      s.sort()

      pointgroup, reindex_op, ntr, pt = self._pointless_indexer_jiffy(
          pointless_hklin, refiner)

      for epoch in self._sweep_handler.get_epochs():
        pointgroups[epoch] = pointgroup
        reindex_ops[epoch] = reindex_op

    else:
      for epoch in self._sweep_handler.get_epochs():
        si = self._sweep_handler.get_sweep_information(epoch)

        hklin = si.get_reflections()
        #hklout = os.path.join(
            #self.get_working_directory(),
            #os.path.split(hklin)[-1].replace('.mtz', '_rdx.mtz'))

        #FileHandler.record_temporary_file(hklout)

        integrater = si.get_integrater()
        refiner = integrater.get_integrater_refiner()

        if self._scalr_input_pointgroup:
          Debug.write('Using input pointgroup: %s' % \
                      self._scalr_input_pointgroup)
          pointgroup = self._scalr_input_pointgroup
          reindex_op = 'h,k,l'
          pt = False

        else:

          pointless_hklin = self._prepare_pointless_hklin(
              hklin, si.get_integrater().get_phi_width())

          pointgroup, reindex_op, ntr, pt = self._pointless_indexer_jiffy(
              pointless_hklin, refiner)

          Debug.write('X1698: %s: %s' % (pointgroup, reindex_op))

          if ntr:

            integrater.integrater_reset_reindex_operator()
            need_to_return = True

        if pt and not probably_twinned:
          probably_twinned = True

        Debug.write('Pointgroup: %s (%s)' % (pointgroup, reindex_op))

        pointgroups[epoch] = pointgroup
        reindex_ops[epoch] = reindex_op

    overall_pointgroup = None

    pointgroup_set = set(pointgroups.values())

    if len(pointgroup_set) > 1 and not probably_twinned:
      raise RuntimeError('non-uniform pointgroups')

    if len(pointgroup_set) > 1:
      Debug.write('Probably twinned, pointgroups: %s' % \
                  ' '.join([p.replace(' ', '') for p in \
                            list(pointgroup_set)]))
      numbers = [Syminfo.spacegroup_name_to_number(s) for s in \
                 pointgroup_set]
      overall_pointgroup = Syminfo.spacegroup_number_to_name(
          min(numbers))
      self._scalr_input_pointgroup = overall_pointgroup

      Chatter.write('Twinning detected, assume pointgroup %s' % \
                    overall_pointgroup)

      need_to_return = True

    else:
      overall_pointgroup = pointgroup_set.pop()

    for epoch in self._sweep_handler.get_epochs():
      si = self._sweep_handler.get_sweep_information(epoch)

      integrater = si.get_integrater()

      integrater.set_integrater_spacegroup_number(
          Syminfo.spacegroup_name_to_number(overall_pointgroup))
      integrater.set_integrater_reindex_operator(
          reindex_ops[epoch], reason='setting point group')
      # This will give us the reflections in the correct point group
      si.set_reflections(integrater.get_integrater_intensities())

    if need_to_return:
      self.set_scaler_done(False)
      self.set_scaler_prepare_done(False)
      return

    # in here now optionally work through the data files which should be
    # indexed with a consistent point group, and transform the orientation
    # matrices by the lattice symmetry operations (if possible) to get a
    # consistent definition of U matrix modulo fixed rotations

    if PhilIndex.params.xia2.settings.unify_setting:

      from scitbx.matrix import sqr
      reference_U = None
      i3 = sqr((1, 0, 0, 0, 1, 0, 0, 0, 1))
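      # i3 is the 3x3 identity; below, for each sweep, we search the
      # lattice symmetry operations for the reindex operator R that makes
      # (U * R).inverse() * reference_U closest to the identity, i.e. the
      # operator that brings this sweep's setting onto the first sweep's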

      for epoch in self._sweep_handler.get_epochs():
        si = self._sweep_handler.get_sweep_information(epoch)
        intgr = si.get_integrater()
        fixed = sqr(intgr.get_goniometer().get_fixed_rotation())
        u, b, s = get_umat_bmat_lattice_symmetry_from_mtz(si.get_reflections())
        U = fixed.inverse() * sqr(u).transpose()
        B = sqr(b)

        if reference_U is None:
          reference_U = U
          continue

        results = []
        for op in s.all_ops():
          R = B * sqr(op.r().as_double()).transpose() * B.inverse()
          nearly_i3 = (U * R).inverse() * reference_U
          score = sum([abs(_n - _i) for (_n, _i) in zip(nearly_i3, i3)])
          results.append((score, op.r().as_hkl(), op))

        results.sort()
        best = results[0]
        Debug.write('Best reindex: %s %.3f' % (best[1], best[0]))
        intgr.set_integrater_reindex_operator(best[2].r().inverse().as_hkl(),
                                              reason='unifying [U] setting')
        si.set_reflections(intgr.get_integrater_intensities())

        # recalculate to verify
        u, b, s = get_umat_bmat_lattice_symmetry_from_mtz(si.get_reflections())
        U = fixed.inverse() * sqr(u).transpose()
        Debug.write('New reindex: %s' % (U.inverse() * reference_U))

        # FIXME I should probably raise an exception at this stage if this
        # is not about I3...

    if self.get_scaler_reference_reflection_file():
      self._reference = self.get_scaler_reference_reflection_file()
      Debug.write('Using HKLREF %s' % self._reference)

    elif Flags.get_reference_reflection_file():
      self._reference = Flags.get_reference_reflection_file()
      Debug.write('Using HKLREF %s' % self._reference)

    params = PhilIndex.params
    use_brehm_diederichs = params.xia2.settings.use_brehm_diederichs
    if len(self._sweep_handler.get_epochs()) > 1 and use_brehm_diederichs:

      brehm_diederichs_files_in = []
      for epoch in self._sweep_handler.get_epochs():

        si = self._sweep_handler.get_sweep_information(epoch)
        hklin = si.get_reflections()
        brehm_diederichs_files_in.append(hklin)

      # now run cctbx.brehm_diederichs to figure out the indexing hand for
      # each sweep
      from xia2.Wrappers.Cctbx.BrehmDiederichs import BrehmDiederichs
      from xia2.lib.bits import auto_logfiler
      brehm_diederichs = BrehmDiederichs()
      brehm_diederichs.set_working_directory(self.get_working_directory())
      auto_logfiler(brehm_diederichs)
      brehm_diederichs.set_input_filenames(brehm_diederichs_files_in)
      # 1 or 3? 1 seems to work better?
      brehm_diederichs.set_asymmetric(1)
      brehm_diederichs.run()
      reindexing_dict = brehm_diederichs.get_reindexing_dict()

      for epoch in self._sweep_handler.get_epochs():

        si = self._sweep_handler.get_sweep_information(epoch)
        intgr = si.get_integrater()
        hklin = si.get_reflections()

        reindex_op = reindexing_dict.get(os.path.abspath(hklin))
        assert reindex_op is not None

        if 1 or reindex_op != 'h,k,l':
          # the condition above is always true: the reindexing operator
          # is applied even when it is the identity
          intgr.set_integrater_reindex_operator(
            reindex_op, reason='match reference')
          si.set_reflections(intgr.get_integrater_intensities())

    elif len(self._sweep_handler.get_epochs()) > 1 and \
           not self._reference:

      first = self._sweep_handler.get_epochs()[0]
      si = self._sweep_handler.get_sweep_information(first)
      self._reference = si.get_reflections()

    if self._reference:

      md = self._factory.Mtzdump()
      md.set_hklin(self._reference)
      md.dump()

      # this check is currently disabled (note the 'and False')
      if md.get_batches() and False:
        raise RuntimeError('reference reflection file %s unmerged' %
                           self._reference)

      datasets = md.get_datasets()

      # also currently disabled (note the 'and False')
      if len(datasets) > 1 and False:
        raise RuntimeError('more than one dataset in %s' %
                           self._reference)

      # then get the unit cell, lattice etc.

      reference_lattice = Syminfo.get_lattice(md.get_spacegroup())
      reference_cell = md.get_dataset_info(datasets[0])['cell']

      # then compute the pointgroup from this...

      # ---------- REINDEX TO CORRECT (REFERENCE) SETTING ----------

      for epoch in self._sweep_handler.get_epochs():
        pl = self._factory.Pointless()

        si = self._sweep_handler.get_sweep_information(epoch)
        hklin = si.get_reflections()

        pl.set_hklin(self._prepare_pointless_hklin(
            hklin, si.get_integrater().get_phi_width()))

        hklout = os.path.join(
            self.get_working_directory(),
            '%s_rdx2.mtz' % os.path.split(hklin)[-1][:-4])

        # we will want to delete this on exit
        FileHandler.record_temporary_file(hklout)

        # now set the initial reflection set as a reference...

        pl.set_hklref(self._reference)

        # write a pointless log file...
        pl.decide_pointgroup()

        Debug.write('Reindexing analysis of %s' % pl.get_hklin())

        pointgroup = pl.get_pointgroup()
        reindex_op = pl.get_reindex_operator()

        Debug.write('Operator: %s' % reindex_op)

        # apply this...

        integrater = si.get_integrater()

        integrater.set_integrater_reindex_operator(reindex_op,
                                                   reason='match reference')
        integrater.set_integrater_spacegroup_number(
            Syminfo.spacegroup_name_to_number(pointgroup))
        si.set_reflections(integrater.get_integrater_intensities())

        md = self._factory.Mtzdump()
        md.set_hklin(si.get_reflections())
        md.dump()

        datasets = md.get_datasets()

        if len(datasets) > 1:
          raise RuntimeError('more than one dataset in %s' %
                             si.get_reflections())

        # then get the unit cell, lattice etc.

        lattice = Syminfo.get_lattice(md.get_spacegroup())
        cell = md.get_dataset_info(datasets[0])['cell']

        if lattice != reference_lattice:
          raise RuntimeError('lattices differ in %s and %s' %
                             (self._reference, si.get_reflections()))

        for j in range(6):
          if math.fabs((cell[j] - reference_cell[j]) /
                       reference_cell[j]) > 0.1:
            raise RuntimeError(
                'unit cell parameters differ in %s and %s' %
                (self._reference, si.get_reflections()))

    # ---------- SORT TOGETHER DATA ----------

    self._sort_together_data_ccp4()

    self._scalr_resolution_limits = { }

    # store central resolution limit estimates

    batch_ranges = [
        self._sweep_handler.get_sweep_information(epoch).get_batch_range()
        for epoch in self._sweep_handler.get_epochs()]

    self._resolution_limit_estimates = erzatz_resolution(
        self._prepared_reflections, batch_ranges)


    return
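
(A minimal sketch of the pointgroup-consensus rule applied above: when the
per-sweep pointgroups disagree, that is an error unless twinning is
suspected, in which case the pointgroup with the lowest spacegroup number
wins. name_to_number is a hypothetical stand-in for
Syminfo.spacegroup_name_to_number.)

def consensus_pointgroup(pointgroups, probably_twinned, name_to_number):
  '''pointgroups: dict mapping sweep epoch -> pointgroup name.'''
  unique = set(pointgroups.values())
  if len(unique) == 1:
    return unique.pop()
  if not probably_twinned:
    raise RuntimeError('non-uniform pointgroups')
  # probably twinned: fall back to the pointgroup with the lowest
  # spacegroup number, the lowest-symmetry common choice
  return min(unique, key=name_to_number)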
Example #41
0
  def _scale(self):
    '''Actually scale all of the data together.'''

    from xia2.Handlers.Environment import debug_memory_usage
    debug_memory_usage()

    Journal.block(
        'scaling', self.get_scaler_xcrystal().get_name(), 'XSCALE',
        {'scaling model':'default (all)'})

    epochs = sorted(self._sweep_information.keys())

    xscale = self.XScale()

    xscale.set_spacegroup_number(self._xds_spacegroup)
    xscale.set_cell(self._scalr_cell)

    Debug.write('Set CELL: %.2f %.2f %.2f %.2f %.2f %.2f' % \
                tuple(self._scalr_cell))
    Debug.write('Set SPACEGROUP_NUMBER: %d' % \
                self._xds_spacegroup)

    Debug.write('Gathering measurements for scaling')

    for epoch in epochs:

      # get the prepared reflections
      reflections = self._sweep_information[epoch][
          'prepared_reflections']

      # and get the wavelength that this belongs to
      dname = self._sweep_information[epoch]['dname']
      sname = self._sweep_information[epoch]['sname']

      # and the resolution range for the reflections
      intgr = self._sweep_information[epoch]['integrater']
      Debug.write('Epoch: %d' % epoch)
      Debug.write('HKL: %s (%s/%s)' % (reflections, dname, sname))

      resolution_low = intgr.get_integrater_low_resolution()
      resolution_high, _ = self._scalr_resolution_limits.get((dname, sname), (0.0, None))

      resolution = (resolution_high, resolution_low)

      xscale.add_reflection_file(reflections, dname, resolution)

    # set the global properties of the sample
    xscale.set_crystal(self._scalr_xname)
    xscale.set_anomalous(self._scalr_anomalous)

    debug_memory_usage()
    xscale.run()

    scale_factor = xscale.get_scale_factor()

    Debug.write('XSCALE scale factor found to be: %e' % scale_factor)

    # record the log file

    pname = self._scalr_pname
    xname = self._scalr_xname

    FileHandler.record_log_file('%s %s XSCALE' % \
                                (pname, xname),
                                os.path.join(self.get_working_directory(),
                                             'XSCALE.LP'))

    # check for outlier reflections and if a number are found
    # then iterate (that is, rerun XSCALE, rejecting these outliers)

    if not PhilIndex.params.dials.fast_mode and not PhilIndex.params.xds.keep_outliers:
      xscale_remove = xscale.get_remove()
      if xscale_remove:
        current_remove = []
        final_remove = []

        # first ensure that there are no duplicate entries...
        if os.path.exists(os.path.join(
            self.get_working_directory(),
            'REMOVE.HKL')):
          with open(os.path.join(
              self.get_working_directory(),
              'REMOVE.HKL'), 'r') as remove_file:
            for line in remove_file:
              h, k, l = map(int, line.split()[:3])
              z = float(line.split()[3])

              if (h, k, l, z) not in current_remove:
                current_remove.append((h, k, l, z))

          for c in xscale_remove:
            if c in current_remove:
              continue
            final_remove.append(c)

          Debug.write(
              '%d alien reflections are already removed' % \
              (len(xscale_remove) - len(final_remove)))

        else:
          # we want to remove all of the new dodgy reflections
          final_remove = xscale_remove

        remove_hkl = open(os.path.join(
            self.get_working_directory(),
            'REMOVE.HKL'), 'w')

        z_min = PhilIndex.params.xds.z_min
        rejected = 0

        # write in the old reflections
        for remove in current_remove:
          z = remove[3]
          if z >= z_min:
            remove_hkl.write('%d %d %d %f\n' % remove)
          else:
            rejected += 1
        Debug.write('Wrote %d old reflections to REMOVE.HKL' % \
                    (len(current_remove) - rejected))
        Debug.write('Rejected %d as z < %f' % \
                    (rejected, z_min))

        # and the new reflections
        rejected = 0
        used = 0
        for remove in final_remove:
          z = remove[3]
          if z >= z_min:
            used += 1
            remove_hkl.write('%d %d %d %f\n' % remove)
          else:
            rejected += 1
        Debug.write('Wrote %d new reflections to REMOVE.HKL' % \
                    (len(final_remove) - rejected))
        Debug.write('Rejected %d as z < %f' % \
                    (rejected, z_min))

        remove_hkl.close()

        # we want to rerun the finishing step so...
        # unless we have added no new reflections
        if used:
          self.set_scaler_done(False)

    if not self.get_scaler_done():
      Chatter.write('Excluding outlier reflections Z > %.2f' %
                    PhilIndex.params.xds.z_min)
      return

    debug_memory_usage()

    # now get the reflection files out and merge them with aimless

    output_files = xscale.get_output_reflection_files()
    wavelength_names = output_files.keys()

    # these are per wavelength - also allow for user defined resolution
    # limits a la bug # 3183. No longer...

    for epoch in self._sweep_information.keys():

      input = self._sweep_information[epoch]

      intgr = input['integrater']

      rkey = input['dname'], input['sname']

      if intgr.get_integrater_user_resolution():
        dmin = intgr.get_integrater_high_resolution()

        if (rkey not in self._user_resolution_limits
            or dmin < self._user_resolution_limits[rkey]):
          self._scalr_resolution_limits[rkey] = (dmin, None)
          self._user_resolution_limits[rkey] = dmin

    self._scalr_scaled_refl_files = { }

    self._scalr_statistics = { }

    max_batches = 0
    mtz_dict = { }

    project_info = { }
    for epoch in self._sweep_information.keys():
      pname = self._scalr_pname
      xname = self._scalr_xname
      dname = self._sweep_information[epoch]['dname']
      reflections = os.path.split(
          self._sweep_information[epoch]['prepared_reflections'])[-1]
      project_info[reflections] = (pname, xname, dname)

    for epoch in self._sweep_information.keys():
      self._sweep_information[epoch]['scaled_reflections'] = None

    debug_memory_usage()

    for wavelength in wavelength_names:
      hklin = output_files[wavelength]

      xsh = XDSScalerHelper()
      xsh.set_working_directory(self.get_working_directory())

      ref = xsh.split_and_convert_xscale_output(
          hklin, 'SCALED_', project_info, 1.0 / scale_factor)

      for hklout in ref.keys():
        for epoch in self._sweep_information.keys():
          if os.path.split(self._sweep_information[epoch][
              'prepared_reflections'])[-1] == \
              os.path.split(hklout)[-1]:
            if self._sweep_information[epoch][
                'scaled_reflections'] is not None:
              raise RuntimeError('duplicate entries')
            self._sweep_information[epoch][
                'scaled_reflections'] = ref[hklout]

      del xsh

    debug_memory_usage()

    for epoch in self._sweep_information.keys():
      hklin = self._sweep_information[epoch]['scaled_reflections']
      dname = self._sweep_information[epoch]['dname']
      sname = self._sweep_information[epoch]['sname']

      hkl_copy = os.path.join(self.get_working_directory(),
                              'R_%s' % os.path.split(hklin)[-1])

      if not os.path.exists(hkl_copy):
        shutil.copyfile(hklin, hkl_copy)

      # let's properly listen to the user's resolution limit needs...

      if self._user_resolution_limits.get((dname, sname), False):
        resolution = self._user_resolution_limits[(dname, sname)]

      else:
        if PhilIndex.params.xia2.settings.resolution.keep_all_reflections is True:
          try:
            # use the integrater belonging to this sweep, not whichever
            # one was left over from the loop above
            intgr = self._sweep_information[epoch]['integrater']
            resolution = intgr.get_detector().get_max_resolution(
                intgr.get_beam_obj().get_s0())
            Debug.write('keep_all_reflections set, using detector limits')
          except Exception:
            resolution = self._estimate_resolution_limit(hklin)
        else:
          resolution = self._estimate_resolution_limit(hklin)

      Chatter.write('Resolution for sweep %s/%s: %.2f' % \
                    (dname, sname, resolution))

      if (dname, sname) not in self._scalr_resolution_limits:
        self._scalr_resolution_limits[(dname, sname)] = (resolution, None)
        self.set_scaler_done(False)
      elif resolution < self._scalr_resolution_limits[(dname, sname)][0]:
        self._scalr_resolution_limits[(dname, sname)] = (resolution, None)
        self.set_scaler_done(False)

    debug_memory_usage()

    if not self.get_scaler_done():
      Debug.write('Returning as scaling not finished...')
      return

    self._sort_together_data_xds()

    highest_resolution = min(limit for limit, _ in self._scalr_resolution_limits.values())

    self._scalr_highest_resolution = highest_resolution

    Debug.write('Scaler highest resolution set to %5.2f' % \
                highest_resolution)

    if not self.get_scaler_done():
      Debug.write('Returning as scaling not finished...')
      return

    sdadd_full = 0.0
    sdb_full = 0.0

    # ---------- FINAL MERGING ----------

    sc = self._factory.Aimless()

    FileHandler.record_log_file('%s %s aimless' % (self._scalr_pname,
                                                   self._scalr_xname),
                                sc.get_log_file())

    sc.set_resolution(highest_resolution)
    sc.set_hklin(self._prepared_reflections)
    sc.set_new_scales_file('%s_final.scales' % self._scalr_xname)

    if sdadd_full != 0.0 or sdb_full != 0.0:
      sc.add_sd_correction('both', 1.0, sdadd_full, sdb_full)

    for epoch in epochs:
      input = self._sweep_information[epoch]
      start, end = (min(input['batches']), max(input['batches']))

      rkey = input['dname'], input['sname']
      run_resolution_limit, _ = self._scalr_resolution_limits[rkey]

      sc.add_run(start, end, exclude = False,
                 resolution = run_resolution_limit,
                 name = input['sname'])

    sc.set_hklout(os.path.join(self.get_working_directory(),
                               '%s_%s_scaled.mtz' % \
                               (self._scalr_pname, self._scalr_xname)))

    if self.get_scaler_anomalous():
      sc.set_anomalous()

    sc.multi_merge()

    FileHandler.record_xml_file('%s %s aimless xml' % (self._scalr_pname,
                                                       self._scalr_xname),
                                sc.get_xmlout())
    data = sc.get_summary()

    loggraph = sc.parse_ccp4_loggraph()

    standard_deviation_info = { }

    for key in loggraph.keys():
      if 'standard deviation v. Intensity' in key:
        dataset = key.split(',')[-1].strip()
        standard_deviation_info[dataset] = transpose_loggraph(
            loggraph[key])

    resolution_info = { }

    for key in loggraph.keys():
      if 'Analysis against resolution' in key:
        dataset = key.split(',')[-1].strip()
        resolution_info[dataset] = transpose_loggraph(
            loggraph[key])

    # and also radiation damage stuff...

    batch_info = { }

    for key in loggraph.keys():
      if 'Analysis against Batch' in key:
        dataset = key.split(',')[-1].strip()
        batch_info[dataset] = transpose_loggraph(
            loggraph[key])


    # finally put all of the results "somewhere useful"

    self._scalr_statistics = data

    self._scalr_scaled_refl_files = copy.deepcopy(
        sc.get_scaled_reflection_files())

    self._scalr_scaled_reflection_files = { }

    # also output the unmerged scalepack format files...

    sc = self._factory.Aimless()
    sc.set_resolution(highest_resolution)
    sc.set_hklin(self._prepared_reflections)
    sc.set_scalepack()

    for epoch in epochs:
      input = self._sweep_information[epoch]
      start, end = (min(input['batches']), max(input['batches']))

      rkey = input['dname'], input['sname']
      run_resolution_limit, _ = self._scalr_resolution_limits[rkey]

      sc.add_run(start, end, exclude = False,
                 resolution = run_resolution_limit,
                 name = input['sname'])

    sc.set_hklout(os.path.join(self.get_working_directory(),
                               '%s_%s_scaled.mtz' % \
                               (self._scalr_pname,
                                self._scalr_xname)))

    if self.get_scaler_anomalous():
      sc.set_anomalous()

    sc.multi_merge()

    self._scalr_scaled_reflection_files['sca_unmerged'] = { }
    self._scalr_scaled_reflection_files['mtz_unmerged'] = { }

    for dataset in sc.get_scaled_reflection_files().keys():
      hklout = sc.get_scaled_reflection_files()[dataset]

      # then mark the scalepack files for copying...

      scalepack = os.path.join(os.path.split(hklout)[0],
                               os.path.split(hklout)[1].replace(
          '_scaled', '_scaled_unmerged').replace('.mtz', '.sca'))
      self._scalr_scaled_reflection_files['sca_unmerged'][
          dataset] = scalepack
      FileHandler.record_data_file(scalepack)
      mtz_unmerged = os.path.splitext(scalepack)[0] + '.mtz'
      self._scalr_scaled_reflection_files['mtz_unmerged'][dataset] = mtz_unmerged
      FileHandler.record_data_file(mtz_unmerged)

    if PhilIndex.params.xia2.settings.merging_statistics.source == 'cctbx':
      for key in self._scalr_scaled_refl_files:
        stats = self._compute_scaler_statistics(
          self._scalr_scaled_reflection_files['mtz_unmerged'][key], wave=key)
        self._scalr_statistics[
          (self._scalr_pname, self._scalr_xname, key)] = stats

    # convert reflection files to .sca format - use mtz2various for this

    self._scalr_scaled_reflection_files['sca'] = { }
    self._scalr_scaled_reflection_files['hkl'] = { }

    for key in self._scalr_scaled_refl_files:

      f = self._scalr_scaled_refl_files[key]
      scaout = '%s.sca' % f[:-4]

      m2v = self._factory.Mtz2various()
      m2v.set_hklin(f)
      m2v.set_hklout(scaout)
      m2v.convert()

      self._scalr_scaled_reflection_files['sca'][key] = scaout
      FileHandler.record_data_file(scaout)

      if PhilIndex.params.xia2.settings.small_molecule is True:
        hklout = '%s.hkl' % f[:-4]

        m2v = self._factory.Mtz2various()
        m2v.set_hklin(f)
        m2v.set_hklout(hklout)
        m2v.convert_shelx()

        self._scalr_scaled_reflection_files['hkl'][key] = hklout
        FileHandler.record_data_file(hklout)
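
(A minimal sketch of the REMOVE.HKL bookkeeping in the _scale() method
above: merge newly flagged (h, k, l, z) outliers with those already on
file, skip duplicates, and keep only reflections with z at or above the
rejection threshold. The one-outlier-per-line 'h k l z' format matches
what the code above reads and writes.)

def merge_outliers(current_remove, xscale_remove, z_min):
  '''Both inputs are lists of (h, k, l, z) tuples; returns the merged
  list to write back to REMOVE.HKL plus the number of new rejections.'''
  merged = [hklz for hklz in current_remove if hklz[3] >= z_min]
  used = 0
  for hklz in xscale_remove:
    if hklz in current_remove:
      continue  # already removed on a previous pass
    if hklz[3] >= z_min:
      merged.append(hklz)
      used += 1
  # as above: if used is zero, nothing new was rejected, so there is
  # no need to flag the scaling step for a rerun
  return merged, used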
Example #42
0
  def _mosflm_parallel_integrate(self):
    '''Perform the integration as before, but this time as a
    number of parallel Mosflm jobs (hence, in separate directories)
    and including a step of pre-refinement of the mosaic spread and
    missets. This will all be kind of explicit and hence probably
    messy!'''

    refinr = self.get_integrater_refiner()

    lattice = refinr.get_refiner_lattice()
    spacegroup_number = lattice_to_spacegroup(lattice)
    mosaic = refinr.get_refiner_payload('mosaic')
    beam = refinr.get_refiner_payload('beam')
    distance = refinr.get_refiner_payload('distance')
    matrix = refinr.get_refiner_payload('mosflm_orientation_matrix')

    integration_params = refinr.get_refiner_payload(
      'mosflm_integration_parameters')

    if integration_params:
      if 'separation' in integration_params:
        self.set_integrater_parameter(
            'mosflm', 'separation',
            '%s %s' % tuple(integration_params['separation']))
      if 'raster' in integration_params:
        self.set_integrater_parameter(
            'mosflm', 'raster',
            '%d %d %d %d %d' % tuple(integration_params['raster']))

    refinr.set_refiner_payload('mosflm_integration_parameters', None)
    pname, xname, dname = self.get_integrater_project_info()

    # what follows below should (i) be run in separate directories
    # and (ii) be repeated N=parallel times.

    nproc = PhilIndex.params.xia2.settings.multiprocessing.nproc
    parallel = nproc

    # FIXME this is something of a kludge - if too few frames refinement
    # and integration does not work well... ideally want at least 15
    # frames / chunk (say)
    nframes = self._intgr_wedge[1] - self._intgr_wedge[0] + 1

    if parallel > nframes // 15:
      parallel = nframes // 15

    if not parallel:
      raise RuntimeError('parallel not set')
    if parallel < 2:
      raise RuntimeError('parallel not parallel: %s' % parallel)

    jobs = []
    hklouts = []
    nref = 0

    # calculate the chunks to use
    offset = self.get_frame_offset()
    start = self._intgr_wedge[0] - offset
    end = self._intgr_wedge[1] - offset

    left_images = 1 + end - start
    left_chunks = parallel
    chunks = []

    while left_images > 0:
      size = left_images // left_chunks
      chunks.append((start, start + size - 1))
      start += size
      left_images -= size
      left_chunks -= 1

    summary_files = []

    for j in range(parallel):

      # make some working directories, as necessary - chunk-(0:N-1)
      wd = os.path.join(self.get_working_directory(),
                        'chunk-%d' % j)
      if not os.path.exists(wd):
        os.makedirs(wd)

      job = MosflmIntegrate()
      job.set_working_directory(wd)

      auto_logfiler(job)

      l = refinr.get_refiner_lattice()

      # create the starting point
      with open(os.path.join(wd, 'xiaintegrate-%s.mat' % l), 'w') as f:
        for m in matrix:
          f.write(m)

      spacegroup_number = lattice_to_spacegroup(lattice)

      job.set_refine_profiles(self._mosflm_refine_profiles)

      # N.B. for harvesting need to append N to dname.

      if pname is not None and xname is not None and dname is not None:
        Debug.write('Harvesting: %s/%s/%s' %
                    (pname, xname, dname))
        harvest_dir = self.get_working_directory()
        temp_dname = '%s_%s' % \
                     (dname, self.get_integrater_sweep_name())
        job.set_pname_xname_dname(pname, xname, temp_dname)

      job.set_template(os.path.basename(self.get_template()))
      job.set_directory(self.get_directory())

      # check for ice - and if so, exclude (ranges taken from
      # XDS documentation)
      if self.get_integrater_ice() != 0:
        Debug.write('Excluding ice rings')
        job.set_exclude_ice(True)

      # exclude specified resolution ranges
      if len(self.get_integrater_excluded_regions()) != 0:
        regions = self.get_integrater_excluded_regions()
        Debug.write('Excluding regions: %s' % repr(regions))
        job.set_exclude_regions(regions)

      mask = standard_mask(self.get_detector())
      for m in mask:
        job.add_instruction(m)

      job.set_input_mat_file('xiaintegrate-%s.mat' % l)

      job.set_beam_centre(beam)
      job.set_distance(distance)
      job.set_space_group_number(spacegroup_number)
      job.set_mosaic(mosaic)

      if self.get_wavelength_prov() == 'user':
        job.set_wavelength(self.get_wavelength())

      parameters = self.get_integrater_parameters('mosflm')
      job.update_parameters(parameters)

      if self._mosflm_gain:
        job.set_gain(self._mosflm_gain)

      # check for resolution limits
      if self._intgr_reso_high > 0.0:
        job.set_d_min(self._intgr_reso_high)
      if self._intgr_reso_low:
        job.set_d_max(self._intgr_reso_low)

      if PhilIndex.params.general.backstop_mask:
        from xia2.Toolkit.BackstopMask import BackstopMask
        mask = BackstopMask(PhilIndex.params.general.backstop_mask)
        mask = mask.calculate_mask_mosflm(self.get_header())
        job.set_mask(mask)

      detector = self.get_detector()
      detector_width, detector_height = detector[0].get_image_size_mm()

      lim_x = 0.5 * detector_width
      lim_y = 0.5 * detector_height

      Debug.write('Scanner limits: %.1f %.1f' % (lim_x, lim_y))
      job.set_limits(lim_x, lim_y)

      job.set_fix_mosaic(self._mosflm_postref_fix_mosaic)

      job.set_pre_refinement(True)
      job.set_image_range(chunks[j])

      # the job is now fully configured - it is started (in a background
      # thread) below

      jobs.append(job)

    # ok, at this stage I need to ...
    #
    # (i) accumulate the statistics as a function of batch
    # (ii) merge them into a single block
    #
    # This is likely to be a pain in the arse!

    all_residuals = []

    threads = []

    for j in range(parallel):
      job = jobs[j]

      # start each job in a background thread; the results are collected
      # in the loop below once they have all finished

      thread = Background(job, 'run')
      thread.start()
      threads.append(thread)

    mosaics = []
    postref_result = { }

    integrated_images_first = 1.0e6
    integrated_images_last = -1.0e6
    self._intgr_per_image_statistics = {}

    for j in range(parallel):
      thread = threads[j]
      thread.stop()
      job = jobs[j]

      # get the log file
      output = job.get_all_output()

      # recording per-chunk log files is currently disabled
      # (note the 'and False')
      if self.get_integrater_sweep_name() and False:
        pname, xname, dname = self.get_integrater_project_info()
        FileHandler.record_log_file(
            '%s %s %s %s mosflm integrate' % \
            (self.get_integrater_sweep_name(),
             pname, xname, '%s_%d' % (dname, j)),
            job.get_log_file())

      # look for things that we want to know...
      # that is, the output reflection file name, the updated
      # value for the gain (if present,) any warnings, errors,
      # or just interesting facts.

      batches = job.get_batches_out()
      integrated_images_first = min(batches[0], integrated_images_first)
      integrated_images_last = max(batches[1], integrated_images_last)

      mosaics.extend(job.get_mosaic_spreads())

      if min(mosaics) < 0:
        raise IntegrationError('negative mosaic spread: %s' % min(mosaics))

      if (job.get_detector_gain_error() and not
          (self.get_imageset().get_detector()[0].get_type() == 'SENSOR_PAD')):
        gain = job.get_suggested_gain()
        if gain is not None:
          self.set_integrater_parameter('mosflm', 'gain', gain)
          self.set_integrater_export_parameter('mosflm', 'gain', gain)
          if self._mosflm_gain:
            Debug.write('GAIN updated to %f' % gain)
          else:
            Debug.write('GAIN found to be %f' % gain)

          self._mosflm_gain = gain
          self._mosflm_rerun_integration = True

      hklout = job.get_hklout()
      Debug.write('Integration output: %s' % hklout)
      hklouts.append(hklout)

      nref += job.get_nref()

      # if a BGSIG error happened try not refining the
      # profile and running again...

      if job.get_bgsig_too_large():
        if not self._mosflm_refine_profiles:
          raise RuntimeError('BGSIG error with profiles fixed')

        Debug.write(
            'BGSIG error detected - try fixing profile...')

        self._mosflm_refine_profiles = False
        self.set_integrater_done(False)

        return

      if job.get_getprof_error():
        Debug.write(
            'GETPROF error detected - try fixing profile...')
        self._mosflm_refine_profiles = False
        self.set_integrater_done(False)

        return

      # here
      # write the report for each image as .*-#$ to Chatter -
      # detailed report will be written automagically to science...

      self._intgr_per_image_statistics.update(job.get_per_image_statistics())
      postref_result.update(job.get_postref_result())

      # inspect the output for e.g. very high weighted residuals

      all_residuals.extend(job.get_residuals())

    self._intgr_batches_out = (integrated_images_first,
                               integrated_images_last)

    if mosaics:
      self.set_integrater_mosaic_min_mean_max(
          min(mosaics), sum(mosaics) / len(mosaics), max(mosaics))
    else:
      # no per-chunk estimates returned - fall back on the refiner's
      # mosaic value gathered at the top of this method
      m = mosaic
      self.set_integrater_mosaic_min_mean_max(m, m, m)

    Chatter.write(self.show_per_image_statistics())

    Chatter.write('Mosaic spread: %.3f < %.3f < %.3f' % \
                  self.get_integrater_mosaic_min_mean_max())

    # gather the statistics from the postrefinement for all sweeps
    # now write this to a postrefinement log

    postref_log = os.path.join(self.get_working_directory(),
                               'postrefinement.log')

    fout = open(postref_log, 'w')

    fout.write('$TABLE: Postrefinement for %s:\n' % \
               self._intgr_sweep_name)
    fout.write('$GRAPHS: Missetting angles:A:1, 2, 3, 4: $$\n')
    fout.write('Batch PhiX PhiY PhiZ $$ Batch PhiX PhiY PhiZ $$\n')

    for image in sorted(postref_result):
      phix = postref_result[image].get('phix', 0.0)
      phiy = postref_result[image].get('phiy', 0.0)
      phiz = postref_result[image].get('phiz', 0.0)

      fout.write('%d %5.2f %5.2f %5.2f\n' % \
                 (image, phix, phiy, phiz))

    fout.write('$$\n')
    fout.close()

    if self.get_integrater_sweep_name():
      pname, xname, dname = self.get_integrater_project_info()
      FileHandler.record_log_file('%s %s %s %s postrefinement' % \
                                  (self.get_integrater_sweep_name(),
                                   pname, xname, dname),
                                  postref_log)

    hklouts.sort()

    hklout = os.path.join(self.get_working_directory(),
                          os.path.split(hklouts[0])[-1])

    Debug.write('Sorting data to %s' % hklout)
    for hklin in hklouts:
      Debug.write('<= %s' % hklin)

    sortmtz = Sortmtz()
    sortmtz.set_hklout(hklout)
    for hklin in hklouts:
      sortmtz.add_hklin(hklin)

    sortmtz.sort()

    self._mosflm_hklout = hklout

    return self._mosflm_hklout
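
(A minimal sketch of the chunking arithmetic used above: split an
inclusive image range into n contiguous, near-equal chunks - sizes differ
by at most one frame - exactly as the loop over left_images/left_chunks
does.)

def split_range(start, end, n):
  '''Split the inclusive image range [start, end] into n chunks.'''
  chunks = []
  left_images = 1 + end - start
  left_chunks = n
  while left_images > 0:
    size = left_images // left_chunks
    chunks.append((start, start + size - 1))
    start += size
    left_images -= size
    left_chunks -= 1
  return chunks

# for example, split_range(1, 90, 4) gives
# [(1, 22), (23, 44), (45, 67), (68, 90)]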
Example #43
0
      raise RuntimeError('no resolution info')

    for epoch in epochs:

      si = self._sweep_handler.get_sweep_information(epoch)
      pname, xname, dname = si.get_project_info()
      sname = si.get_sweep_name()
      intgr = si.get_integrater()
      start, end = si.get_batch_range()

      if (dname, sname) in user_resolution_limits:
        resolution = user_resolution_limits[(dname, sname)]
        self._scalr_resolution_limits[(dname, sname)] = resolution
        if resolution < highest_resolution:
          highest_resolution = resolution
        Chatter.write('Resolution limit for %s: %5.2f' % \
                      (dname, resolution))
        continue

      hklin = sc.get_unmerged_reflection_file()
      resolution = self._estimate_resolution_limit(
        hklin, batch_range=(start, end))
      Debug.write('Resolution for sweep %s: %.2f' % \
                  (sname, resolution))

      if (dname, sname) not in self._scalr_resolution_limits:
        self._scalr_resolution_limits[(dname, sname)] = resolution
        self.set_scaler_done(False)

      if resolution < highest_resolution:
        highest_resolution = resolution
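
(A minimal sketch of the convergence pattern in the fragment above: a
resolution limit is recorded per (dataset, sweep) key, and whenever a new
key appears the scaler is flagged not-done so the framework repeats the
scaling pass with the tightened limits; the overall limit is the smallest
value seen.)

def update_resolution_limits(limits, dname, sname, estimate):
  '''limits: dict mapping (dname, sname) -> resolution in Angstroms.
  Returns True if a new limit was recorded (scaling must rerun).'''
  key = (dname, sname)
  if key not in limits:
    limits[key] = estimate
    return True
  return False

# once every sweep has a limit:
# highest_resolution = min(limits.values())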
Example #44
0
File: print.py Project: hainm/xia2
def run():
  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')
  Chatter.write(xinfo.get_output())
  write_citations()
Example #45
0
from xia2.Applications.xia2 import check, check_cctbx_version, check_environment
from xia2.Applications.xia2 import get_command_line, write_citations, help


def run():
  if os.path.exists('xia2-working.phil'):
    sys.argv.append('xia2-working.phil')
  try:
    check_environment()
    check()
  except Exception as e:
    traceback.print_exc(file = open('xia2.error', 'w'))
    Chatter.write('Status: error "%s"' % str(e))

  # print the version
  Chatter.write(Version)
  Citations.cite('xia2')

  start_time = time.time()

  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')

  crystals = xinfo.get_crystals()
  for crystal_id, crystal in crystals.items():
    #cwd = os.path.abspath(os.curdir)
    from libtbx import Auto
    scale_dir = PhilIndex.params.xia2.settings.scale.directory
    if scale_dir is Auto:
      scale_dir = 'scale'
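
(The scale.directory parameter above uses libtbx's Auto sentinel, which
means "let the program decide" and is distinct from None, "not set". A
minimal sketch of resolving such a default:)

from libtbx import Auto

def resolve_scale_dir(configured):
  # Auto is compared by identity, exactly as in the fragment above
  if configured is Auto:
    return 'scale'
  return configured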
Example #46
0
    def setup(self):
        """Set everything up..."""

        # check arguments are all ascii

        Debug.write("Start parsing command line: " + str(sys.argv))

        for token in sys.argv:
            try:
                token.encode("ascii")
            except UnicodeError:  # covers both encode and decode errors
                raise RuntimeError("non-ascii characters in input")

        self._argv = copy.deepcopy(sys.argv)

        replacements = {
            "-2d": "pipeline=2d",
            "-2di": "pipeline=2di",
            "-3d": "pipeline=3d",
            "-3di": "pipeline=3di",
            "-3dii": "pipeline=3dii",
            "-3dd": "pipeline=3dd",
            "-dials": "pipeline=dials",
            "-quick": "dials.fast_mode=true",
            "-failover": "failover=true",
            "-small_molecule": "small_molecule=true",
        }
        for k, v in replacements.items():
            if k in self._argv:
                print(
                    "***\nCommand line option %s is deprecated.\nPlease use %s instead\n***"
                    % (k, v))
                self._argv[self._argv.index(k)] = v
        if "-atom" in self._argv:
            idx = self._argv.index("-atom")
            element = self._argv[idx + 1]
            self._argv[idx:idx + 2] = ["atom=%s" % element]
            print(
                "***\nCommand line option -atom %s is deprecated.\nPlease use atom=%s instead\n***"
                % (element, element))

        # first of all try to interpret arguments as phil parameters/files

        from xia2.Handlers.Phil import master_phil
        from libtbx.phil import command_line

        cmd_line = command_line.argument_interpreter(master_phil=master_phil)
        working_phil, self._argv = cmd_line.process_and_fetch(
            args=self._argv, custom_processor="collect_remaining")

        PhilIndex.merge_phil(working_phil)
        try:
            params = PhilIndex.get_python_object()
        except RuntimeError as e:
            raise Sorry(e)

        # sanity check / interpret Auto in input
        from libtbx import Auto

        if params.xia2.settings.input.atom is None:
            if params.xia2.settings.input.anomalous is Auto:
                PhilIndex.update("xia2.settings.input.anomalous=false")
        else:
            if params.xia2.settings.input.anomalous is False:
                raise Sorry(
                    "Setting anomalous=false and atom type inconsistent")
            params.xia2.settings.input.anomalous = True
            PhilIndex.update("xia2.settings.input.anomalous=true")

        if params.xia2.settings.resolution.keep_all_reflections is Auto:
            if (params.xia2.settings.small_molecule is True
                    and params.xia2.settings.resolution.d_min is None
                    and params.xia2.settings.resolution.d_max is None):
                PhilIndex.update(
                    "xia2.settings.resolution.keep_all_reflections=true")
            else:
                PhilIndex.update(
                    "xia2.settings.resolution.keep_all_reflections=false")

        if params.xia2.settings.small_molecule is True:
            Debug.write("Small molecule selected")
            if params.xia2.settings.symmetry.chirality is None:
                PhilIndex.update("xia2.settings.symmetry.chirality=nonchiral")
            params = PhilIndex.get_python_object()

        # pipeline options
        self._read_pipeline()

        for (parameter, value) in (
            ("project", params.xia2.settings.project),
            ("crystal", params.xia2.settings.crystal),
        ):
            validate_project_crystal_name(parameter, value)

        Debug.write("Project: %s" % params.xia2.settings.project)
        Debug.write("Crystal: %s" % params.xia2.settings.crystal)

        # FIXME add some consistency checks in here e.g. that there are
        # images assigned, there is a lattice assigned if cell constants
        # are given and so on

        params = PhilIndex.get_python_object()
        mp_params = params.xia2.settings.multiprocessing
        from xia2.Handlers.Environment import get_number_cpus

        if mp_params.mode == "parallel":
            if mp_params.type == "qsub":
                if which("qsub") is None:
                    raise Sorry("qsub not available")
            if mp_params.njob is Auto:
                mp_params.njob = get_number_cpus()
                if mp_params.nproc is Auto:
                    mp_params.nproc = 1
            elif mp_params.nproc is Auto:
                mp_params.nproc = get_number_cpus()
        elif mp_params.mode == "serial":
            if mp_params.type == "qsub":
                if which("qsub") is None:
                    raise Sorry("qsub not available")
            if mp_params.njob is Auto:
                mp_params.njob = 1
            if mp_params.nproc is Auto:
                mp_params.nproc = get_number_cpus()

        PhilIndex.update("xia2.settings.multiprocessing.njob=%d" %
                         mp_params.njob)
        PhilIndex.update("xia2.settings.multiprocessing.nproc=%d" %
                         mp_params.nproc)
        params = PhilIndex.get_python_object()
        mp_params = params.xia2.settings.multiprocessing

        if mp_params.nproc > 1 and os.name == "nt":
            raise Sorry("nproc > 1 is not supported on Windows.")  # #191

        if params.xia2.settings.indexer is not None:
            add_preference("indexer", params.xia2.settings.indexer)
        if params.xia2.settings.refiner is not None:
            add_preference("refiner", params.xia2.settings.refiner)
        if params.xia2.settings.integrater is not None:
            add_preference("integrater", params.xia2.settings.integrater)
        if params.xia2.settings.scaler is not None:
            add_preference("scaler", params.xia2.settings.scaler)

        if params.xia2.settings.multi_sweep_indexing is Auto:
            if (params.xia2.settings.small_molecule is True
                    and "dials" == params.xia2.settings.indexer):
                PhilIndex.update("xia2.settings.multi_sweep_indexing=True")
            else:
                PhilIndex.update("xia2.settings.multi_sweep_indexing=False")
        if (params.xia2.settings.multi_sweep_indexing is True
                and params.xia2.settings.multiprocessing.mode == "parallel"):
            Chatter.write(
                "Multi sweep indexing disabled:\nMSI is not available for parallel processing."
            )
            PhilIndex.update("xia2.settings.multi_sweep_indexing=False")

        input_json = params.xia2.settings.input.json
        if input_json is not None and len(input_json):
            for json_file in input_json:
                assert os.path.isfile(json_file)
                load_experiments(json_file)

        reference_geometry = params.xia2.settings.input.reference_geometry
        if reference_geometry is not None and len(reference_geometry) > 0:
            reference_geometries = "\n".join([
                "xia2.settings.input.reference_geometry=%s" %
                os.path.abspath(g)
                for g in params.xia2.settings.input.reference_geometry
            ])
            Debug.write(reference_geometries)
            PhilIndex.update(reference_geometries)
            Debug.write("xia2.settings.trust_beam_centre=true")
            PhilIndex.update("xia2.settings.trust_beam_centre=true")
            params = PhilIndex.get_python_object()

        params = PhilIndex.get_python_object()
        if params.xia2.settings.input.xinfo is not None:
            xinfo_file = os.path.abspath(params.xia2.settings.input.xinfo)
            PhilIndex.update("xia2.settings.input.xinfo=%s" % xinfo_file)
            params = PhilIndex.get_python_object()
            self.set_xinfo(xinfo_file)

            # issue #55 if not set ATOM in xinfo but anomalous=true or atom= set
            # on commandline, set here, should be idempotent

            if params.xia2.settings.input.anomalous is True:
                crystals = self._xinfo.get_crystals()
                for xname in crystals:
                    xtal = crystals[xname]
                    Debug.write("Setting anomalous for crystal %s" % xname)
                    xtal.set_anomalous(True)
        else:
            xinfo_file = "%s/automatic.xinfo" % os.path.abspath(os.curdir)
            PhilIndex.update("xia2.settings.input.xinfo=%s" % xinfo_file)
            params = PhilIndex.get_python_object()

        if params.dials.find_spots.phil_file is not None:
            PhilIndex.update(
                "dials.find_spots.phil_file=%s" %
                os.path.abspath(params.dials.find_spots.phil_file))
        if params.dials.index.phil_file is not None:
            PhilIndex.update("dials.index.phil_file=%s" %
                             os.path.abspath(params.dials.index.phil_file))
        if params.dials.refine.phil_file is not None:
            PhilIndex.update("dials.refine.phil_file=%s" %
                             os.path.abspath(params.dials.refine.phil_file))
        if params.dials.integrate.phil_file is not None:
            PhilIndex.update("dials.integrate.phil_file=%s" %
                             os.path.abspath(params.dials.integrate.phil_file))
        if params.xds.index.xparm is not None:
            Flags.set_xparm(params.xds.index.xparm)
        if params.xds.index.xparm_ub is not None:
            Flags.set_xparm_ub(params.xds.index.xparm_ub)

        if params.xia2.settings.scale.freer_file is not None:
            freer_file = os.path.abspath(params.xia2.settings.scale.freer_file)
            if not os.path.exists(freer_file):
                raise RuntimeError("%s does not exist" % freer_file)
            from xia2.Modules.FindFreeFlag import FindFreeFlag

            column = FindFreeFlag(freer_file)
            Debug.write("FreeR_flag column in %s found: %s" %
                        (freer_file, column))
            PhilIndex.update("xia2.settings.scale.freer_file=%s" % freer_file)

        if params.xia2.settings.scale.reference_reflection_file is not None:
            reference_reflection_file = os.path.abspath(
                params.xia2.settings.scale.reference_reflection_file)
            if not os.path.exists(reference_reflection_file):
                raise RuntimeError("%s does not exist" %
                                   reference_reflection_file)
            PhilIndex.update(
                "xia2.settings.scale.reference_reflection_file=%s" %
                reference_reflection_file)

        params = PhilIndex.get_python_object()

        datasets = unroll_datasets(PhilIndex.params.xia2.settings.input.image)

        for dataset in datasets:

            start_end = None

            # here we only care about ':' which are later than C:\
            if ":" in dataset[3:]:
                tokens = dataset.split(":")
                # cope with windows drives i.e. C:\data\blah\thing_0001.cbf:1:100
                if len(tokens[0]) == 1:
                    tokens = ["%s:%s" % (tokens[0], tokens[1])] + tokens[2:]
                if len(tokens) != 3:
                    raise RuntimeError("/path/to/image_0001.cbf:start:end")

                dataset = tokens[0]
                start_end = int(tokens[1]), int(tokens[2])

            from xia2.Applications.xia2setup import is_hd5f_name

            if os.path.exists(os.path.abspath(dataset)):
                dataset = os.path.abspath(dataset)
            else:
                directories = [os.getcwd()] + self._argv[1:]
                found = False
                for d in directories:
                    if os.path.exists(os.path.join(d, dataset)):
                        dataset = os.path.join(d, dataset)
                        found = True
                        break
                if not found:
                    raise Sorry("Could not find %s in %s" %
                                (dataset, " ".join(directories)))

            if is_hd5f_name(dataset):
                self._hdf5_master_files.append(dataset)
                if start_end:
                    Debug.write("Image range: %d %d" % start_end)
                    if dataset not in self._default_start_end:
                        self._default_start_end[dataset] = []
                    self._default_start_end[dataset].append(start_end)
                else:
                    Debug.write("No image range specified")

            else:
                template, directory = image2template_directory(
                    os.path.abspath(dataset))

                self._default_template.append(os.path.join(
                    directory, template))
                self._default_directory.append(directory)

                Debug.write("Interpreted from image %s:" % dataset)
                Debug.write("Template %s" % template)
                Debug.write("Directory %s" % directory)

                if start_end:
                    Debug.write("Image range: %d %d" % start_end)
                    key = os.path.join(directory, template)
                    if key not in self._default_start_end:
                        self._default_start_end[key] = []
                    self._default_start_end[key].append(start_end)
                else:
                    Debug.write("No image range specified")

        # finally, check that all arguments were read and raise an exception
        # if any of them were nonsense.

        with open("xia2-working.phil", "wb") as f:
            f.write(PhilIndex.working_phil.as_str())
            f.write(
                os.linesep
            )  # temporarily required for https://github.com/dials/dials/issues/522
        with open("xia2-diff.phil", "wb") as f:
            f.write(PhilIndex.get_diff().as_str())
            f.write(
                os.linesep
            )  # temporarily required for https://github.com/dials/dials/issues/522

        Debug.write("\nDifference PHIL:")
        Debug.write(PhilIndex.get_diff().as_str(), strip=False)

        Debug.write("Working PHIL:")
        Debug.write(PhilIndex.working_phil.as_str(), strip=False)

        nonsense = "Unknown command-line options:"
        was_nonsense = False

        for j, argv in enumerate(self._argv):
            if j == 0:
                continue
            if argv[0] != "-" and "=" not in argv:
                continue
            if j not in self._understood:
                nonsense += " %s" % argv
                was_nonsense = True

        if was_nonsense:
            raise RuntimeError(nonsense)
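
The dataset parsing in the middle of setup() has one subtlety: a trailing :start:end image range has to be distinguished from a Windows drive letter. A self-contained sketch of just that tokenising rule (hypothetical helper, mirrors the logic above):

def split_dataset_spec(dataset):
    # cope with windows drives, e.g. C:\data\thing_0001.cbf:1:100 -
    # only ':' characters beyond a possible drive letter are significant
    start_end = None
    if ':' in dataset[3:]:
        tokens = dataset.split(':')
        if len(tokens[0]) == 1:  # rejoin the drive letter with the path
            tokens = ['%s:%s' % (tokens[0], tokens[1])] + tokens[2:]
        if len(tokens) != 3:
            raise RuntimeError('/path/to/image_0001.cbf:start:end')
        dataset = tokens[0]
        start_end = int(tokens[1]), int(tokens[2])
    return dataset, start_end

print(split_dataset_spec(r'C:\data\thing_0001.cbf:1:100'))
print(split_dataset_spec('/dls/data/thing_0001.cbf:1:90'))
print(split_dataset_spec('/dls/data/thing_0001.cbf'))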
Example #47
0
def compute_nmol_from_volume(volume, mass, resolution):

  file = open(nmolparams, 'r')

  while True:
    line = file.readline()
    if not line[0] == '#':
      break

  resolutions = list(map(float, line.split(',')))[:13]
  P0_list = list(map(float, file.readline().split(',')))[:13]
  Vm_bar_list = list(map(float, file.readline().split(',')))[:13]
  w_list = list(map(float, file.readline().split(',')))[:13]
  A_list = list(map(float, file.readline().split(',')))[:13]
  s_list = list(map(float, file.readline().split(',')))[:13]
  file.close()

  if resolution > resolutions[-1]:
    # raise RuntimeError, 'Resolution outside useful range'
    Chatter.write('Resolution lower than %s -> computing for %f' % \
                  (resolutions[-1], resolution))
    resolution = resolutions[-1]
  if resolution < resolutions[0]:
    Chatter.write('Resolution higher than peak %f -> %f' % \
                  (resolution, resolutions[0]))
    resolution = resolutions[0]


  start = 0

  for start in range(12):
    if resolution > resolutions[start] and \
       resolution < resolutions[start + 1]:
      break

  # can start at start - interpolate

  diff = (resolution - resolutions[start]) / \
         (resolutions[start + 1] - resolutions[start])

  P0 = P0_list[start] + diff * (P0_list[start + 1] - P0_list[start])
  Vm_bar = Vm_bar_list[start] + \
           diff * (Vm_bar_list[start + 1] - Vm_bar_list[start])
  w = w_list[start] + diff * (w_list[start + 1] - w_list[start])
  A = A_list[start] + diff * (A_list[start + 1] - A_list[start])
  s = s_list[start] + diff * (s_list[start + 1] - s_list[start])

  nmols = []
  pdfs = []

  nmol = 1
  Vm = volume / (mass * nmol)
  while True:
    z = (Vm - Vm_bar) / w
    p = P0 + A * math.exp(- math.exp(-z) - z * s + 1)
    nmols.append(nmol)
    pdfs.append(p)
    nmol += 1
    Vm = volume / (mass * nmol)
    if Vm < 1.0:
      break

  return nmols, pdfs
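
The scoring loop evaluates a Matthews-coefficient probability for each candidate number of molecules: Vm = volume / (mass * nmol) is reduced to z = (Vm - Vm_bar) / w and scored as p = P0 + A * exp(-exp(-z) - z * s + 1). A toy evaluation of that formula with made-up coefficients (these are not the values read from the nmolparams file):

import math

# Illustrative coefficients only - NOT the interpolated nmolparams values.
P0, Vm_bar, w, A, s = 0.0, 2.4, 0.4, 1.0, 0.5

volume, mass = 120000.0, 25000.0  # cell volume (A^3) and molecular mass (Da)

for nmol in (1, 2, 3, 4):
    Vm = volume / (mass * nmol)
    z = (Vm - Vm_bar) / w
    p = P0 + A * math.exp(-math.exp(-z) - z * s + 1)
    print('nmol=%d Vm=%.2f p=%.3f' % (nmol, Vm, p))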
Example #48
0
      raise RuntimeError('processing abandoned')

    self._intgr_batches_out = integrater.get_batches_out()

    mosaics = integrater.get_mosaic_spreads()
    if mosaics:
      self.set_integrater_mosaic_min_mean_max(
          min(mosaics), sum(mosaics) / len(mosaics), max(mosaics))
    else:
      m = indxr.get_indexer_mosaic()
      self.set_integrater_mosaic_min_mean_max(m, m, m)

    # write the report for each image as .*-#$ to Chatter -
    # detailed report will be written automagically to science...

    Chatter.write(self.show_per_image_statistics())

    Chatter.write('Mosaic spread: %.3f < %.3f < %.3f' % \
                  self.get_integrater_mosaic_min_mean_max())

    # gather the statistics from the postrefinement
    postref_result = integrater.get_postref_result()

    # now write this to a postrefinement log
    postref_log = os.path.join(self.get_working_directory(),
                               'postrefinement.log')

    fout = open(postref_log, 'w')

    fout.write('$TABLE: Postrefinement for %s:\n' % \
               self._intgr_sweep_name)
Example #49
0
File: Indexer.py Project: xia2/xia2
  def index(self):

    if not self.get_indexer_finish_done():
      f = inspect.currentframe().f_back.f_back
      m = f.f_code.co_filename
      l = f.f_lineno

      Debug.write('Index in %s called from %s %d' %
                  (self.__class__.__name__, m, l))

    while not self.get_indexer_finish_done():
      while not self.get_indexer_done():
        while not self.get_indexer_prepare_done():

          # --------------
          # call prepare()
          # --------------

          self.set_indexer_prepare_done(True)
          self._index_prepare()

        # --------------------------------------------
        # then do the proper indexing - using the best
        # solution already stored if available (c/f
        # eliminate above)
        # --------------------------------------------

        self.set_indexer_done(True)

        if self.get_indexer_sweeps():
          xsweeps = [ s.get_name() for s in self.get_indexer_sweeps() ]
          if len(xsweeps) > 1:
            # find "SWEEPn, SWEEP(n+1), (..), SWEEPm" and aggregate to "SWEEPS n-m"
            xsweeps = map(lambda x: (int(x[5:]), int(x[5:])) if x.startswith('SWEEP') else x, xsweeps)
            xsweeps[0] = [xsweeps[0]]
            def compress(seen, nxt):
              if isinstance(seen[-1], tuple) and isinstance(nxt, tuple) and (seen[-1][1] + 1 == nxt[0]):
                seen[-1] = (seen[-1][0], nxt[1])
              else:
                seen.append(nxt)
              return seen
            xsweeps = reduce(compress, xsweeps)
            xsweeps = map(lambda x: ('SWEEP%d' % x[0] if x[0] == x[1] else
                                     'SWEEPS %d to %d' % (x[0], x[1])) if isinstance(x, tuple)
                                     else x, xsweeps)
          if len(xsweeps) > 1:
            sweep_names = ', '.join(xsweeps[:-1])
            sweep_names += ' & ' + xsweeps[-1]
          else:
            sweep_names = xsweeps[0]

          if PhilIndex.params.xia2.settings.show_template:
            template = self.get_indexer_sweep().get_template()
            Chatter.banner(
              'Autoindexing %s (%s)' %(sweep_names, template))
          else:
            Chatter.banner('Autoindexing %s' %sweep_names)


        if not self._indxr_helper:

          result = self._index()

          if not self._indxr_done:
            Debug.write(
              'Looks like indexing failed - try again!')
            continue

          solutions = { }
          for k in self._indxr_other_lattice_cell.keys():
            solutions[k] = self._indxr_other_lattice_cell[k][
              'cell']

          # create a helper for the indexer to manage solutions
          self._indxr_helper = _IndexerHelper(solutions)

          solution = self._indxr_helper.get()

          # compare these against the final solution, if different
          # reject solution and return - correct solution will
          # be used next cycle

          if self._indxr_lattice != solution[0] and \
             not self._indxr_input_cell and \
             not PhilIndex.params.xia2.settings.integrate_p1:
            Chatter.write('Rerunning indexing lattice %s to %s' %
                          (self._indxr_lattice, solution[0]))
            Debug.write(
              'Rerunning indexing with target lattice %s' % \
              solution[0])
            self.set_indexer_done(False)

        else:
          # rerun autoindexing with the best known current solution

          solution = self._indxr_helper.get()
          self._indxr_input_lattice = solution[0]
          self._indxr_input_cell = solution[1]
          result = self._index()

      # next finish up...

      self.set_indexer_finish_done(True)
      self._index_finish()

      if self._indxr_print:
        Chatter.write(self.show_indexer_solutions())
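
The sweep-name aggregation buried in index() is easier to read in isolation: runs of consecutive SWEEPn names collapse to a single "SWEEPS n to m". A standalone sketch of the same compression (hypothetical helper, not part of xia2):

from functools import reduce

def compress_sweep_names(names):
    items = [(int(n[5:]), int(n[5:])) if n.startswith('SWEEP') else n
             for n in names]
    items[0] = [items[0]]  # seed the accumulator for reduce

    def compress(seen, nxt):
        if isinstance(seen[-1], tuple) and isinstance(nxt, tuple) \
           and seen[-1][1] + 1 == nxt[0]:
            seen[-1] = (seen[-1][0], nxt[1])  # extend the current run
        else:
            seen.append(nxt)
        return seen

    out = []
    for x in reduce(compress, items):
        if isinstance(x, tuple):
            out.append('SWEEP%d' % x[0] if x[0] == x[1]
                       else 'SWEEPS %d to %d' % (x[0], x[1]))
        else:
            out.append(x)
    return out

print(compress_sweep_names(['SWEEP1', 'SWEEP2', 'SWEEP3', 'SWEEP7']))
# ['SWEEPS 1 to 3', 'SWEEP7']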
Example #50
0
    def integrate(self):
        '''Actually perform integration until we think we are done...'''

        while not self.get_integrater_finish_done():
            while not self.get_integrater_done():
                while not self.get_integrater_prepare_done():

                    Debug.write('Preparing to do some integration...')
                    self.set_integrater_prepare_done(True)

                    # if this raises an exception, perhaps the autoindexing
                    # solution has too high symmetry. if this the case, then
                    # perform a self._intgr_indexer.eliminate() - this should
                    # reset the indexing system

                    try:
                        self._integrate_prepare()

                    except BadLatticeError as e:

                        Journal.banner('eliminated this lattice', size=80)

                        Chatter.write('Rejecting bad lattice %s' % str(e))
                        self._intgr_refiner.eliminate()
                        self._integrater_reset()

                # FIXME x1698 - may be the case that _integrate() returns the
                # raw intensities, _integrate_finish() returns intensities
                # which may have been adjusted or corrected. See #1698 below.

                Debug.write('Doing some integration...')

                self.set_integrater_done(True)

                template = self.get_integrater_sweep().get_template()

                if self._intgr_sweep_name:
                    if PhilIndex.params.xia2.settings.show_template:
                        Chatter.banner('Integrating %s (%s)' % \
                                       (self._intgr_sweep_name, template))
                    else:
                        Chatter.banner('Integrating %s' % \
                                       (self._intgr_sweep_name))
                try:

                    #1698
                    self._intgr_hklout_raw = self._integrate()

                except BadLatticeError as e:
                    Chatter.write('Rejecting bad lattice %s' % str(e))

                    Journal.banner('eliminated this lattice', size=80)

                    self._intgr_refiner.eliminate()
                    self._integrater_reset()

            self.set_integrater_finish_done(True)

            try:
                # allow for the fact that postrefinement may be used
                # to reject the lattice...

                self._intgr_hklout = self._integrate_finish()

            except BadLatticeError as e:
                Chatter.write('Bad Lattice Error: %s' % str(e))
                self._intgr_refiner.eliminate()
                self._integrater_reset()

        return self._intgr_hklout
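
integrate() here and index() in the previous example drive the same control pattern: three nested prepare/do/finish loops whose flags are set optimistically and then cleared by failure handlers (eliminate/reset) to force a rerun. A minimal skeleton of that pattern (hypothetical class, for illustration only):

class Stages:
    # bare-bones version of the prepare/do/finish retry loops above
    def __init__(self):
        self.prepare_done = self.work_done = self.finish_done = False

    def run(self):
        while not self.finish_done:
            while not self.work_done:
                while not self.prepare_done:
                    self.prepare_done = True  # set optimistically...
                    self.prepare()            # ...a handler may clear it again
                self.work_done = True
                self.work()
            self.finish_done = True
            self.finish()

    def prepare(self): print('prepare')
    def work(self): print('work')
    def finish(self): print('finish')

Stages().run()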
Example #51
0
    def _index_prepare(self):

        from xia2.Handlers.Citations import Citations
        Citations.cite('dials')

        #all_images = self.get_matching_images()
        #first = min(all_images)
        #last = max(all_images)

        spot_lists = []
        datablocks = []

        for imageset, xsweep in zip(self._indxr_imagesets, self._indxr_sweeps):

            Chatter.banner('Spotfinding %s' % xsweep.get_name())

            first, last = imageset.get_scan().get_image_range()

            # at this stage, break out to run the DIALS code: this sets itself up
            # now cheat and pass in some information... save re-reading all of the
            # image headers

            # FIXME need to adjust this to allow (say) three chunks of images

            from dxtbx.serialize import dump
            from dxtbx.datablock import DataBlock
            sweep_filename = os.path.join(
                self.get_working_directory(),
                '%s_datablock.json' % xsweep.get_name())
            dump.datablock(DataBlock([imageset]), sweep_filename)

            genmask = self.GenerateMask()
            genmask.set_input_datablock(sweep_filename)
            genmask.set_output_datablock(
                os.path.join(
                    self.get_working_directory(), '%s_%s_datablock.json' %
                    (genmask.get_xpid(), xsweep.get_name())))
            genmask.set_params(PhilIndex.params.dials.masking)
            sweep_filename, mask_pickle = genmask.run()
            Debug.write('Generated mask for %s: %s' %
                        (xsweep.get_name(), mask_pickle))

            gain = PhilIndex.params.xia2.settings.input.gain
            if gain is libtbx.Auto:
                gain_estimater = self.EstimateGain()
                gain_estimater.set_sweep_filename(sweep_filename)
                gain_estimater.run()
                gain = gain_estimater.get_gain()
                Chatter.write('Estimated gain: %.2f' % gain)
                PhilIndex.params.xia2.settings.input.gain = gain

            # FIXME this should really use the assigned spot finding regions
            #offset = self.get_frame_offset()
            spotfinder = self.Spotfinder()
            if last - first > 10:
                spotfinder.set_write_hot_mask(True)
            spotfinder.set_input_sweep_filename(sweep_filename)
            spotfinder.set_output_sweep_filename(
                '%s_%s_datablock.json' %
                (spotfinder.get_xpid(), xsweep.get_name()))
            spotfinder.set_input_spot_filename(
                '%s_%s_strong.pickle' %
                (spotfinder.get_xpid(), xsweep.get_name()))
            if PhilIndex.params.dials.fast_mode:
                wedges = self._index_select_images_i(imageset)
                spotfinder.set_scan_ranges(wedges)
            else:
                spotfinder.set_scan_ranges([(first, last)])
            if PhilIndex.params.dials.find_spots.phil_file is not None:
                spotfinder.set_phil_file(
                    PhilIndex.params.dials.find_spots.phil_file)
            min_spot_size = PhilIndex.params.dials.find_spots.min_spot_size
            if min_spot_size is libtbx.Auto:
                if imageset.get_detector()[0].get_type() == 'SENSOR_PAD':
                    min_spot_size = 3
                else:
                    min_spot_size = None
            if min_spot_size is not None:
                spotfinder.set_min_spot_size(min_spot_size)
            min_local = PhilIndex.params.dials.find_spots.min_local
            if min_local is not None:
                spotfinder.set_min_local(min_local)
            sigma_strong = PhilIndex.params.dials.find_spots.sigma_strong
            if sigma_strong:
                spotfinder.set_sigma_strong(sigma_strong)
            gain = PhilIndex.params.xia2.settings.input.gain
            if gain:
                spotfinder.set_gain(gain)
            filter_ice_rings = PhilIndex.params.dials.find_spots.filter_ice_rings
            if filter_ice_rings:
                spotfinder.set_filter_ice_rings(filter_ice_rings)
            kernel_size = PhilIndex.params.dials.find_spots.kernel_size
            if kernel_size:
                spotfinder.set_kernel_size(kernel_size)
            global_threshold = PhilIndex.params.dials.find_spots.global_threshold
            if global_threshold is not None:
                spotfinder.set_global_threshold(global_threshold)
            spotfinder.run()

            spot_filename = spotfinder.get_spot_filename()
            if not os.path.exists(spot_filename):
                raise RuntimeError("Spotfinding failed: %s does not exist." %
                                   os.path.basename(spot_filename))

            spot_lists.append(spot_filename)
            datablocks.append(spotfinder.get_output_sweep_filename())

            from libtbx import easy_pickle
            from dials.util.ascii_art import spot_counts_per_image_plot
            refl = easy_pickle.load(spot_filename)
            if not len(refl):
                raise RuntimeError('No spots found in sweep %s' %
                                   xsweep.get_name())
            Chatter.write(spot_counts_per_image_plot(refl), strip=False)

            if not PhilIndex.params.dials.fast_mode:
                detectblanks = self.DetectBlanks()
                detectblanks.set_sweep_filename(datablocks[-1])
                detectblanks.set_reflections_filename(spot_filename)
                detectblanks.run()
                json = detectblanks.get_results()
                offset = imageset.get_scan().get_image_range()[0]
                blank_regions = json['strong']['blank_regions']
                if len(blank_regions):
                    blank_regions = [(int(s), int(e))
                                     for s, e in blank_regions]
                    for blank_start, blank_end in blank_regions:
                        Chatter.write(
                            'WARNING: Potential blank images: %i -> %i' %
                            (blank_start + 1, blank_end))

                    if PhilIndex.params.xia2.settings.remove_blanks:
                        non_blanks = []
                        start, end = imageset.get_array_range()
                        last_blank_end = start
                        for blank_start, blank_end in blank_regions:
                            if blank_start > start:
                                non_blanks.append(
                                    (last_blank_end, blank_start))
                            last_blank_end = blank_end

                        if last_blank_end + 1 < end:
                            non_blanks.append((last_blank_end, end))

                        xsweep = self.get_indexer_sweep()
                        xwav = xsweep.get_wavelength()
                        xsample = xsweep.get_xsample()

                        sweep_name = xsweep.get_name()
                        import string
                        for i, (nb_start, nb_end) in enumerate(non_blanks):
                            assert i < 26
                            if i == 0:
                                sub_imageset = imageset[nb_start -
                                                        start:nb_end - start]
                                xsweep._frames_to_process = (nb_start + 1,
                                                             nb_end + 1)
                                self.set_indexer_prepare_done(done=False)
                                self._indxr_imagesets[
                                    self._indxr_imagesets.index(
                                        imageset)] = sub_imageset
                                xsweep._integrater._setup_from_imageset(
                                    sub_imageset)
                            else:
                                new_name = '_'.join(
                                    (sweep_name, string.ascii_lowercase[i]))
                                new_sweep = xwav.add_sweep(
                                    new_name,
                                    xsample,
                                    directory=os.path.join(
                                        os.path.basename(
                                            xsweep.get_directory()), new_name),
                                    image=imageset.get_path(nb_start - start),
                                    frames_to_process=(nb_start + 1, nb_end),
                                )
                                Chatter.write(
                                    "Generating new sweep: %s (%s:%i:%i)" %
                                    (new_sweep.get_name(),
                                     new_sweep.get_image(),
                                     new_sweep.get_frames_to_process()[0],
                                     new_sweep.get_frames_to_process()[1]))
                        return

            if not PhilIndex.params.xia2.settings.trust_beam_centre:
                discovery = self.DiscoverBetterExperimentalModel()
                discovery.set_sweep_filename(datablocks[-1])
                discovery.set_spot_filename(spot_filename)
                #wedges = self._index_select_images_i(imageset)
                #discovery.set_scan_ranges(wedges)
                #discovery.set_scan_ranges([(first + offset, last + offset)])
                try:
                    discovery.run()
                except Exception as e:
                    Debug.write('DIALS beam centre search failed: %s' % str(e))
                else:
                    # overwrite datablock.json in datablocks list
                    datablocks[
                        -1] = discovery.get_optimized_datablock_filename()

        self.set_indexer_payload("spot_lists", spot_lists)
        self.set_indexer_payload("datablocks", datablocks)

        return
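
The remove_blanks branch inverts the detected blank regions into the non-blank wedges that become new sweeps. The interval arithmetic on its own (hypothetical helper, mirrors the loop above):

def non_blank_regions(start, end, blank_regions):
    # given the scan's array range and blank (start, end) regions,
    # return the complementary non-blank wedges
    non_blanks = []
    last_blank_end = start
    for blank_start, blank_end in blank_regions:
        if blank_start > start:
            non_blanks.append((last_blank_end, blank_start))
        last_blank_end = blank_end
    if last_blank_end + 1 < end:
        non_blanks.append((last_blank_end, end))
    return non_blanks

print(non_blank_regions(0, 100, [(40, 60)]))  # [(0, 40), (60, 100)]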
Example #52
0
  def _integrate(self):
    '''Actually do the integration - in XDS terms this will mean running
    DEFPIX and INTEGRATE to measure all the reflections.'''

    experiment = self._intgr_refiner.get_refined_experiment_list(
      self.get_integrater_epoch())[0]
    crystal_model = experiment.crystal
    self._intgr_refiner_cell = crystal_model.get_unit_cell().parameters()

    images_str = '%d to %d' % tuple(self._intgr_wedge)
    cell_str = '%.2f %.2f %.2f %.2f %.2f %.2f' %tuple(self._intgr_refiner_cell)

    if len(self._fp_directory) <= 50:
      dirname = self._fp_directory
    else:
      dirname = '...%s' % self._fp_directory[-46:]

    Journal.block(
        'integrating', self._intgr_sweep_name, 'XDS',
        {'images':images_str,
         'cell':cell_str,
         'lattice':self._intgr_refiner.get_refiner_lattice(),
         'template':self._fp_template,
         'directory':dirname,
         'resolution':'%.2f' % self._intgr_reso_high})

    first_image_in_wedge = self.get_image_name(self._intgr_wedge[0])

    defpix = self.Defpix()

    # pass in the correct data

    for file in ['X-CORRECTIONS.cbf',
                 'Y-CORRECTIONS.cbf',
                 'BKGINIT.cbf',
                 'XPARM.XDS']:
      defpix.set_input_data_file(file, self._xds_data_files[file])

    defpix.set_data_range(self._intgr_wedge[0],
                          self._intgr_wedge[1])

    if self.get_integrater_high_resolution() > 0.0 and \
           self.get_integrater_user_resolution():
      Debug.write('Setting resolution limit in DEFPIX to %.2f' % \
                  self.get_integrater_high_resolution())
      defpix.set_resolution_high(self.get_integrater_high_resolution())
      defpix.set_resolution_low(self.get_integrater_low_resolution())

    elif self.get_integrater_low_resolution():
      Debug.write('Setting low resolution limit in DEFPIX to %.2f' % \
                  self.get_integrater_low_resolution())
      defpix.set_resolution_high(0.0)
      defpix.set_resolution_low(self.get_integrater_low_resolution())

    defpix.run()

    # and gather the result files
    for file in ['BKGPIX.cbf',
                 'ABS.cbf']:
      self._xds_data_files[file] = defpix.get_output_data_file(file)

    integrate = self.Integrate()

    if self._xds_integrate_parameters:
      integrate.set_updates(self._xds_integrate_parameters)

    # decide what images we are going to process, if not already
    # specified

    if not self._intgr_wedge:
      images = self.get_matching_images()
      self.set_integrater_wedge(min(images),
                                max(images))

    first_image_in_wedge = self.get_image_name(self._intgr_wedge[0])

    integrate.set_data_range(self._intgr_wedge[0],
                             self._intgr_wedge[1])

    for file in ['X-CORRECTIONS.cbf',
                 'Y-CORRECTIONS.cbf',
                 'BLANK.cbf',
                 'BKGPIX.cbf',
                 'GAIN.cbf']:
      integrate.set_input_data_file(file, self._xds_data_files[file])

    if 'GXPARM.XDS' in self._xds_data_files:
      Debug.write('Using globally refined parameters')
      integrate.set_input_data_file(
          'XPARM.XDS', self._xds_data_files['GXPARM.XDS'])
      integrate.set_refined_xparm()
    else:
      integrate.set_input_data_file(
          'XPARM.XDS', self._xds_data_files['XPARM.XDS'])

    integrate.run()

    self._intgr_per_image_statistics = integrate.get_per_image_statistics()
    Chatter.write(self.show_per_image_statistics())

    # record the log file -

    pname, xname, dname = self.get_integrater_project_info()
    sweep = self.get_integrater_sweep_name()
    FileHandler.record_log_file('%s %s %s %s INTEGRATE' % \
                                (pname, xname, dname, sweep),
                                os.path.join(self.get_working_directory(),
                                             'INTEGRATE.LP'))

    # and copy the first pass INTEGRATE.HKL...

    lattice = self._intgr_refiner.get_refiner_lattice()
    if not os.path.exists(os.path.join(
        self.get_working_directory(),
        'INTEGRATE-%s.HKL' % lattice)):
      here = self.get_working_directory()
      shutil.copyfile(os.path.join(here, 'INTEGRATE.HKL'),
                      os.path.join(here, 'INTEGRATE-%s.HKL' % lattice))

    # record INTEGRATE.HKL for e.g. BLEND.

    FileHandler.record_more_data_file(
        '%s %s %s %s INTEGRATE' % (pname, xname, dname, sweep),
        os.path.join(self.get_working_directory(), 'INTEGRATE.HKL'))

    # should the existence of these require that I rerun the
    # integration or can we assume that the application of a
    # sensible resolution limit will achieve this??

    self._xds_integrate_parameters = integrate.get_updates()

    # record the mosaic spread &c.

    m_min, m_mean, m_max = integrate.get_mosaic()
    self.set_integrater_mosaic_min_mean_max(m_min, m_mean, m_max)

    Chatter.write('Mosaic spread: %.3f < %.3f < %.3f' % \
                  self.get_integrater_mosaic_min_mean_max())

    return os.path.join(self.get_working_directory(), 'INTEGRATE.HKL')
Example #53
0
def run():
    from libtbx.utils import Sorry
    if len(sys.argv) < 2 or '-help' in sys.argv or '--help' in sys.argv:
        help()
        sys.exit()

    if '-version' in sys.argv or '--version' in sys.argv:
        import xia2.XIA2Version
        print(xia2.XIA2Version.Version)
        print(dials_version())
        ccp4_version = get_ccp4_version()
        if ccp4_version is not None:
            print('CCP4 %s' % ccp4_version)
        sys.exit()

    try:
        check_environment()
    except Exception as e:
        traceback.print_exc(file=open('xia2.error', 'w'))
        Chatter.write('Error setting up xia2 environment: %s' % str(e))
        Chatter.write(
            'Please send the contents of xia2.txt, xia2.error and xia2-debug.txt to:'
        )
        Chatter.write('*****@*****.**')
        sys.exit(1)

    wd = os.getcwd()

    try:
        xinfo = xia2_main()
        Chatter.write('Status: normal termination')

        Debug.write('\n------\nTiming summary:')
        import xia2.Driver.DefaultDriver
        xia2.Driver.DefaultDriver.output_timing_information()
        return xinfo
    except Sorry as s:
        Chatter.write('Error: %s' % str(s))
        sys.exit(1)
    except Exception as e:
        traceback.print_exc(file=open(os.path.join(wd, 'xia2.error'), 'w'))
        Chatter.write('Error: %s' % str(e))
        Chatter.write(
            'Please send the contents of xia2.txt, xia2.error and xia2-debug.txt to:'
        )
        Chatter.write('*****@*****.**')
        sys.exit(1)
Example #54
0
  def _integrate_finish(self):
    '''Finish off the integration by running correct.'''

    # first run the postrefinement etc with spacegroup P1
    # and the current unit cell - this will be used to
    # obtain a benchmark rmsd in pixels / phi and also
    # cell deviations (this is working towards spotting bad
    # indexing solutions) - only do this if we have no
    # reindex matrix... and no postrefined cell...

    p1_deviations = None

    # fix for bug # 3264 -
    # if we have not run integration with refined parameters, make it so...
    # erm? shouldn't this therefore return if this is the principle, or
    # set the flag after we have tested the lattice?

    if 'GXPARM.XDS' not in self._xds_data_files and \
      PhilIndex.params.xds.integrate.reintegrate:
      Debug.write(
          'Resetting integrater, to ensure refined orientation is used')
      self.set_integrater_done(False)

    if not self.get_integrater_reindex_matrix() and not self._intgr_cell \
           and not Flags.get_no_lattice_test() and \
           not self.get_integrater_sweep().get_user_lattice():
      correct = self.Correct()

      correct.set_data_range(self._intgr_wedge[0],
                             self._intgr_wedge[1])

      if self.get_polarization() > 0.0:
        correct.set_polarization(self.get_polarization())

      # FIXME should this be using the correctly transformed
      # cell or are the results ok without it?!

      correct.set_spacegroup_number(1)
      correct.set_cell(self._intgr_refiner_cell)

      correct.run()

      # record the log file -

      pname, xname, dname = self.get_integrater_project_info()
      sweep = self.get_integrater_sweep_name()
      FileHandler.record_log_file('%s %s %s %s CORRECT' % \
                                  (pname, xname, dname, sweep),
                                  os.path.join(
          self.get_working_directory(),
          'CORRECT.LP'))

      FileHandler.record_more_data_file(
          '%s %s %s %s CORRECT' % (pname, xname, dname, sweep),
          os.path.join(self.get_working_directory(), 'XDS_ASCII.HKL'))

      cell = correct.get_result('cell')
      cell_esd = correct.get_result('cell_esd')

      Debug.write('Postrefinement in P1 results:')
      Debug.write('%7.3f %7.3f %7.3f %7.3f %7.3f %7.3f' % \
                  tuple(cell))
      Debug.write('%7.3f %7.3f %7.3f %7.3f %7.3f %7.3f' % \
                  tuple(cell_esd))
      Debug.write('Deviations: %.2f pixels %.2f degrees' % \
                  (correct.get_result('rmsd_pixel'),
                   correct.get_result('rmsd_phi')))

      p1_deviations = (correct.get_result('rmsd_pixel'),
                       correct.get_result('rmsd_phi'))

    # next run the postrefinement etc with the given
    # cell / lattice - this will be the assumed result...

    correct = self.Correct()

    correct.set_data_range(self._intgr_wedge[0],
                           self._intgr_wedge[1])

    if self.get_polarization() > 0.0:
      correct.set_polarization(self.get_polarization())

    # BUG # 2695 probably comes from here - need to check...
    # if the pointless interface comes back with a different
    # crystal setting then the unit cell stored in self._intgr_cell
    # needs to be set to None...

    if self.get_integrater_spacegroup_number():
      correct.set_spacegroup_number(
          self.get_integrater_spacegroup_number())
      if not self._intgr_cell:
        raise RuntimeError('no unit cell to recycle')
      correct.set_cell(self._intgr_cell)

    # BUG # 3113 - new version of XDS will try and figure the
    # best spacegroup out from the intensities (and get it wrong!)
    # unless we set the spacegroup and cell explicitly

    if not self.get_integrater_spacegroup_number():
      cell = self._intgr_refiner_cell
      lattice = self._intgr_refiner.get_refiner_lattice()
      spacegroup_number = lattice_to_spacegroup_number(lattice)

      # this should not prevent the postrefinement from
      # working correctly, else what is above would not
      # work correctly (the postrefinement test)

      correct.set_spacegroup_number(spacegroup_number)
      correct.set_cell(cell)

      Debug.write('Setting spacegroup to: %d' % spacegroup_number)
      Debug.write(
        'Setting cell to: %.2f %.2f %.2f %.2f %.2f %.2f' % tuple(cell))

    if self.get_integrater_reindex_matrix():

      # bug! if the lattice is not primitive the values in this
      # reindex matrix need to be multiplied by a constant which
      # depends on the Bravais lattice centering.

      lattice = self._intgr_refiner.get_refiner_lattice()

      import scitbx.matrix
      matrix = self.get_integrater_reindex_matrix()
      matrix = scitbx.matrix.sqr(matrix).transpose().elems
      matrix = r_to_rt(matrix)

      if lattice[1] == 'P':
        mult = 1
      elif lattice[1] == 'C' or lattice[1] == 'I':
        mult = 2
      elif lattice[1] == 'R':
        mult = 3
      elif lattice[1] == 'F':
        mult = 4
      else:
        raise RuntimeError('unknown multiplier for lattice %s' % lattice)

      Debug.write('REIDX multiplier for lattice %s: %d' % \
                  (lattice, mult))

      mult_matrix = [mult * m for m in matrix]

      Debug.write('REIDX set to %d %d %d %d %d %d %d %d %d %d %d %d' % \
                  tuple(mult_matrix))
      correct.set_reindex_matrix(mult_matrix)

    correct.run()

    # erm. just to be sure
    if self.get_integrater_reindex_matrix() and \
           correct.get_reindex_used():
      raise RuntimeError('Reindex panic!')

    # get the reindex operation used, which may be useful if none was
    # set but XDS decided to apply one, e.g. #419.

    if not self.get_integrater_reindex_matrix() and \
           correct.get_reindex_used():
      # convert this reindex operation to h, k, l form: n.b. this
      # will involve dividing through by the lattice centring multiplier

      matrix = rt_to_r(correct.get_reindex_used())
      import scitbx.matrix
      matrix = scitbx.matrix.sqr(matrix).transpose().elems

      lattice = self._intgr_refiner.get_refiner_lattice()

      if lattice[1] == 'P':
        mult = 1.0
      elif lattice[1] == 'C' or lattice[1] == 'I':
        mult = 2.0
      elif lattice[1] == 'R':
        mult = 3.0
      elif lattice[1] == 'F':
        mult = 4.0
      else:
        raise RuntimeError('unknown multiplier for lattice %s' % lattice)

      matrix = [m / mult for m in matrix]

      reindex_op = mat_to_symop(matrix)

      # assign this to self: will this reset?! make for a leaky
      # abstraction and just assign this...

      # self.set_integrater_reindex_operator(reindex)

      self._intgr_reindex_operator = reindex_op


    # record the log file -

    pname, xname, dname = self.get_integrater_project_info()
    sweep = self.get_integrater_sweep_name()
    FileHandler.record_log_file('%s %s %s %s CORRECT' % \
                                (pname, xname, dname, sweep),
                                os.path.join(self.get_working_directory(),
                                             'CORRECT.LP'))

    # should get some interesting stuff from the XDS correct file
    # here, for instance the resolution range to use in integration
    # (which should be fed back if not fast) and so on...

    self._intgr_corrected_hklout = os.path.join(self.get_working_directory(),
                                'XDS_ASCII.HKL')

    # also record the batch range - needed for the analysis of the
    # radiation damage in chef...

    self._intgr_batches_out = (self._intgr_wedge[0],
                               self._intgr_wedge[1])

    # FIXME perhaps I should also feedback the GXPARM file here??
    for file in ['GXPARM.XDS']:
      self._xds_data_files[file] = correct.get_output_data_file(file)

    # record the postrefined cell parameters
    self._intgr_cell = correct.get_result('cell')
    self._intgr_n_ref = correct.get_result('n_ref')

    Debug.write('Postrefinement in "correct" spacegroup results:')
    Debug.write('%7.3f %7.3f %7.3f %7.3f %7.3f %7.3f' % \
                tuple(correct.get_result('cell')))
    Debug.write('%7.3f %7.3f %7.3f %7.3f %7.3f %7.3f' % \
                tuple(correct.get_result('cell_esd')))
    Debug.write('Deviations: %.2f pixels %.2f degrees' % \
                (correct.get_result('rmsd_pixel'),
                 correct.get_result('rmsd_phi')))

    Debug.write('Error correction parameters: A=%.3f B=%.3f' % \
                correct.get_result('sdcorrection'))

    # compute misorientation of axes

    xparm_file = os.path.join(self.get_working_directory(), 'GXPARM.XDS')

    from iotbx.xds import xparm
    handle = xparm.reader()
    handle.read_file(xparm_file)

    rotn = handle.rotation_axis
    beam = handle.beam_vector

    dot = sum([rotn[j] * beam[j] for j in range(3)])
    r = math.sqrt(sum([rotn[j] * rotn[j] for j in range(3)]))
    b = math.sqrt(sum([beam[j] * beam[j] for j in range(3)]))

    rtod = 180.0 / math.pi

    angle = rtod * math.fabs(0.5 * math.pi - math.acos(dot / (r * b)))

    Debug.write('Axis misalignment %.2f degrees' % angle)

    correct_deviations = (correct.get_result('rmsd_pixel'),
                          correct.get_result('rmsd_phi'))

    if p1_deviations:
      # compare and reject if both > 50% higher - though adding a little
      # flexibility - 0.5 pixel / osc width slack.

      pixel = p1_deviations[0]
      phi = math.sqrt(0.05 * 0.05 + \
                      p1_deviations[1] * p1_deviations[1])

      threshold = Flags.get_rejection_threshold()

      Debug.write('RMSD ratio: %.2f' % (correct_deviations[0] / pixel))
      Debug.write('RMSPhi ratio: %.2f' % (correct_deviations[1] / phi))

      if correct_deviations[0] / pixel > threshold and \
             correct_deviations[1] / phi > threshold:

        Chatter.write(
            'Eliminating this indexing solution as postrefinement')
        Chatter.write(
            'deviations rather high relative to triclinic')
        raise BadLatticeError(
            'high relative deviations in postrefinement')

    if not Flags.get_quick() and Flags.get_remove():
      # check for alien reflections and perhaps recycle - removing them
      if len(correct.get_remove()) > 0:

        correct_remove = correct.get_remove()
        current_remove = []
        final_remove = []

        # first ensure that there are no duplicate entries...
        if os.path.exists(os.path.join(
            self.get_working_directory(),
            'REMOVE.HKL')):
          for line in open(os.path.join(
              self.get_working_directory(),
              'REMOVE.HKL'), 'r').readlines():
            h, k, l = map(int, line.split()[:3])
            z = float(line.split()[3])

            if (h, k, l, z) not in current_remove:
              current_remove.append((h, k, l, z))

          for c in correct_remove:
            if c in current_remove:
              continue
            final_remove.append(c)

          Debug.write(
              '%d alien reflections are already removed' % \
              (len(correct_remove) - len(final_remove)))
        else:
          # we want to remove all of the new dodgy reflections
          final_remove = correct_remove

        remove_hkl = open(os.path.join(
            self.get_working_directory(),
            'REMOVE.HKL'), 'w')

        z_min = Flags.get_z_min()
        rejected = 0

        # write in the old reflections
        for remove in current_remove:
          z = remove[3]
          if z >= z_min:
            remove_hkl.write('%d %d %d %f\n' % remove)
          else:
            rejected += 1
        Debug.write('Wrote %d old reflections to REMOVE.HKL' % \
                    (len(current_remove) - rejected))
        Debug.write('Rejected %d as z < %f' % \
                    (rejected, z_min))

        # and the new reflections
        rejected = 0
        used = 0
        for remove in final_remove:
          z = remove[3]
          if z >= z_min:
            used += 1
            remove_hkl.write('%d %d %d %f\n' % remove)
          else:
            rejected += 1
        Debug.write('Wrote %d new reflections to REMOVE.HKL' % \
                    (len(final_remove) - rejected))
        Debug.write('Rejected %d as z < %f' % \
                    (rejected, z_min))

        remove_hkl.close()

        # we want to rerun the finishing step so...
        # unless we have added no new reflections... or unless we
        # have not confirmed the point group (see SCI-398)

        if used and self.get_integrater_reindex_matrix():
          self.set_integrater_finish_done(False)

    else:
      Debug.write(
          'Going quickly so not removing %d outlier reflections...' % \
          len(correct.get_remove()))

    # Convert INTEGRATE.HKL to MTZ format and reapply any reindexing operations
    # spacegroup changes to allow use with CCP4 / Aimless for scaling

    integrate_hkl = os.path.join(
      self.get_working_directory(), 'INTEGRATE.HKL')

    hklout = os.path.splitext(integrate_hkl)[0] + ".mtz"
    self._factory.set_working_directory(self.get_working_directory())
    pointless = self._factory.Pointless()
    pointless.set_xdsin(integrate_hkl)
    pointless.set_hklout(hklout)
    pointless.xds_to_mtz()

    integrate_mtz = hklout

    if self.get_integrater_reindex_operator() or \
       self.get_integrater_spacegroup_number():

      Debug.write('Reindexing things to MTZ')

      reindex = Reindex()
      reindex.set_working_directory(self.get_working_directory())
      auto_logfiler(reindex)

      if self.get_integrater_reindex_operator():
        reindex.set_operator(self.get_integrater_reindex_operator())

      if self.get_integrater_spacegroup_number():
        reindex.set_spacegroup(self.get_integrater_spacegroup_number())

      hklout = '%s_reindex.mtz' % os.path.splitext(integrate_mtz)[0]

      reindex.set_hklin(integrate_mtz)
      reindex.set_hklout(hklout)
      reindex.reindex()
      integrate_mtz = hklout

    return integrate_mtz
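
The "misorientation of axes" check near the end of _integrate_finish computes the angle between the rotation axis and the plane perpendicular to the beam, i.e. the complement of the axis/beam angle. The same calculation standalone (pure Python; the vectors are hypothetical stand-ins for the GXPARM.XDS values):

import math

def axis_misalignment(rotn, beam):
    # angle between the rotation axis and the plane normal to the beam
    dot = sum(r * b for r, b in zip(rotn, beam))
    r = math.sqrt(sum(x * x for x in rotn))
    b = math.sqrt(sum(x * x for x in beam))
    return math.degrees(abs(0.5 * math.pi - math.acos(dot / (r * b))))

# hypothetical vectors: axis ~ +y with a small tilt, beam along +z
print('%.2f degrees' % axis_misalignment((0.0, 1.0, 0.01), (0.0, 0.0, 1.0)))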
Example #55
0
  def _scale(self):
    '''Perform all of the operations required to deliver the scaled
    data.'''

    epochs = self._sweep_handler.get_epochs()

    if PhilIndex.params.xia2.settings.optimize_scaling:
      self._determine_best_scale_model_8way()
    else:
      self._scalr_corrections = True
      self._scalr_correct_absorption = True
      self._scalr_correct_partiality = False
      self._scalr_correct_decay = True

    if self._scalr_corrections:
      Journal.block(
          'scaling', self.get_scaler_xcrystal().get_name(), 'CCP4',
          {'scaling model':'automatic',
           'absorption':self._scalr_correct_absorption,
           'tails':self._scalr_correct_partiality,
           'decay':self._scalr_correct_decay
           })

    else:
      Journal.block(
          'scaling', self.get_scaler_xcrystal().get_name(), 'CCP4',
          {'scaling model':'default'})

    sc = self._updated_aimless()
    sc.set_hklin(self._prepared_reflections)
    sc.set_intensities(PhilIndex.params.ccp4.aimless.intensities)

    sc.set_chef_unmerged(True)

    sc.set_new_scales_file('%s.scales' % self._scalr_xname)

    user_resolution_limits = { }

    for epoch in epochs:

      si = self._sweep_handler.get_sweep_information(epoch)
      pname, xname, dname = si.get_project_info()
      sname = si.get_sweep_name()
      intgr = si.get_integrater()

      if intgr.get_integrater_user_resolution():
        dmin = intgr.get_integrater_high_resolution()

        if (dname, sname) not in user_resolution_limits:
          user_resolution_limits[(dname, sname)] = dmin
        elif dmin < user_resolution_limits[(dname, sname)]:
          user_resolution_limits[(dname, sname)] = dmin

      start, end = si.get_batch_range()

      if (dname, sname) in self._scalr_resolution_limits:
        resolution = self._scalr_resolution_limits[(dname, sname)]
        sc.add_run(start, end, exclude = False,
                   resolution = resolution, name = sname)
      else:
        sc.add_run(start, end, name = sname)

    sc.set_hklout(os.path.join(self.get_working_directory(),
                               '%s_%s_scaled_test.mtz' % \
                               (self._scalr_pname, self._scalr_xname)))

    if self.get_scaler_anomalous():
      sc.set_anomalous()

    # what follows, sucks

    if Flags.get_failover():

      try:
        sc.scale()
      except RuntimeError as e:

        es = str(e)

        if 'bad batch' in es or \
               'negative scales run' in es or \
               'no observations' in es:

          # first ID the sweep from the batch no

          batch = int(es.split()[-1])
          epoch = self._identify_sweep_epoch(batch)
          sweep = self._scalr_integraters[
              epoch].get_integrater_sweep()

          # then remove it from my parent xcrystal

          self.get_scaler_xcrystal().remove_sweep(sweep)

          # then remove it from the scaler list of integraters
          # - this should really be a scaler interface method

          del self._scalr_integraters[epoch]

          # then tell the user what is happening

          Chatter.write(
              'Sweep %s gave negative scales - removing' % \
              sweep.get_name())

          # then reset the prepare, do, finish flags

          self.set_scaler_prepare_done(False)
          self.set_scaler_done(False)
          self.set_scaler_finish_done(False)

          # and return

          return

        else:

          raise
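
The failover branch above follows a pattern that recurs throughout xia2: on a recoverable scaling error, prune the offending sweep and clear the done-flags so the framework re-enters the scaling cycle. A stripped-down sketch of that control flow (remove_offending_sweep is a hypothetical helper, not xia2 API):

def scale_with_failover(scaler):
    # Hypothetical condensation of the prune-and-retry pattern above.
    try:
        scaler.scale()
    except RuntimeError as e:
        if 'no observations' not in str(e):
            raise
        scaler.remove_offending_sweep()  # hypothetical pruning helper
        # clearing the done-flags makes the framework rerun from prepare
        scaler.set_scaler_prepare_done(False)
        scaler.set_scaler_done(False)
        scaler.set_scaler_finish_done(False)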
Example #56
0
def compute_nmol_from_volume(volume, mass, resolution):

    params_file = open(nmolparams, "r")

    # skip comment lines at the top of the parameter file
    while True:
        line = params_file.readline()
        if not line.startswith("#"):
            break

    resolutions = list(map(float, line.split(",")))[:13]
    P0_list = list(map(float, params_file.readline().split(",")))[:13]
    Vm_bar_list = list(map(float, params_file.readline().split(",")))[:13]
    w_list = list(map(float, params_file.readline().split(",")))[:13]
    A_list = list(map(float, params_file.readline().split(",")))[:13]
    s_list = list(map(float, params_file.readline().split(",")))[:13]
    params_file.close()

    if resolution > resolutions[-1]:
        Chatter.write(
            "Resolution lower than %s -> computing for %f"
            % (resolutions[-1], resolutions[-1])
        )
        resolution = resolutions[-1]
    if resolution < resolutions[0]:
        Chatter.write(
            "Resolution higher than peak %f -> %f" % (resolution, resolutions[0])
        )
        resolution = resolutions[0]

    start = 0

    for start in range(12):
        if resolutions[start] < resolution < resolutions[start + 1]:
            break

    # can start at start - interpolate

    diff = (resolution - resolutions[start]) / (
        resolutions[start + 1] - resolutions[start]
    )

    P0 = P0_list[start] + diff * (P0_list[start + 1] - P0_list[start])
    Vm_bar = Vm_bar_list[start] + diff * (Vm_bar_list[start + 1] - Vm_bar_list[start])
    w = w_list[start] + diff * (w_list[start + 1] - w_list[start])
    A = A_list[start] + diff * (A_list[start + 1] - A_list[start])
    s = s_list[start] + diff * (s_list[start + 1] - s_list[start])

    nmols = []
    pdfs = []

    nmol = 1
    Vm = volume / (mass * nmol)
    while True:
        z = (Vm - Vm_bar) / w
        p = P0 + A * math.exp(-math.exp(-z) - z * s + 1)
        nmols.append(nmol)
        pdfs.append(p)
        nmol += 1
        Vm = volume / (mass * nmol)
        if Vm < 1.0:
            break

    return nmols, pdfs
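
For orientation: the routine evaluates, for each candidate copy number, the Matthews volume Vm = volume / (mass * nmol) and a probability p = P0 + A * exp(-exp(-z) - z * s + 1) with z = (Vm - Vm_bar) / w, where the coefficients are interpolated at the given resolution. A minimal usage sketch follows; the numbers are illustrative, not from the source:

# Hypothetical call: volume in cubic Angstroms, mass in Daltons,
# resolution in Angstroms (illustrative values only).
nmols, pdfs = compute_nmol_from_volume(volume=240000.0, mass=18000.0,
                                       resolution=1.8)
best = nmols[pdfs.index(max(pdfs))]  # copy number with highest probability
print('Most probable nmol: %d' % best)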
Example #57
0
File: XSweep.py Project: xia2/xia2
      if bl_info:
        from xia2.Handlers.CIF import CIF, mmCIF
        cifblock, mmcifblock = bl_info.CIF_block(), bl_info.mmCIF_block()
        if cifblock:
          CIF.set_block(bl_info.get_block_name(), cifblock)
        if mmcifblock:
          mmCIF.set_block(bl_info.get_block_name(), mmcifblock)

if __name__ == '__main__':
  directory = os.path.join('z:', 'data', '12287')

  image = '12287_1_E1_001.img'

  xs = XSweep('DEMO', None, directory, image)

  xs_descr = str(xs)

  Chatter.write('.')
  for record in xs_descr.split('\n'):
    Chatter.write(record.strip())

  Chatter.write('.')

  Chatter.write('Refined beam is: %6.2f %6.2f' % xs.get_indexer_beam_centre())
  Chatter.write('Distance:        %6.2f' % xs.get_indexer_distance())
  Chatter.write('Cell: %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f' % \
                xs.get_indexer_cell())
  Chatter.write('Lattice: %s' % xs.get_indexer_lattice())
  Chatter.write('Mosaic: %6.2f' % xs.get_indexer_mosaic())
  Chatter.write('Hklout: %s' % xs.get_integrater_intensities())
Example #58
0
    def run(self):
      '''Run defpix.'''

      #image_header = self.get_header()

      ## crank through the header dictionary and replace incorrect
      ## information with updated values through the indexer
      ## interface if available...

      ## need to add distance, wavelength - that should be enough...

      #if self.get_distance():
        #image_header['distance'] = self.get_distance()

      #if self.get_wavelength():
        #image_header['wavelength'] = self.get_wavelength()

      #if self.get_two_theta():
        #image_header['two_theta'] = self.get_two_theta()

      header = imageset_to_xds(self.get_imageset())

      xds_inp = open(os.path.join(self.get_working_directory(),
                                  'XDS.INP'), 'w')

      # what are we doing?
      xds_inp.write('JOB=DEFPIX\n')

      for record in header:
        xds_inp.write('%s\n' % record)

      name_template = os.path.join(self.get_directory(),
                                   self.get_template().replace('#', '?'))

      record = 'NAME_TEMPLATE_OF_DATA_FRAMES=%s\n' % \
               name_template

      xds_inp.write(record)

      xds_inp.write('DATA_RANGE=%d %d\n' % self._data_range)

      # include the resolution range, perhaps
      if self._resolution_high > 0.0 or self._resolution_low > 0.0:
        xds_inp.write('INCLUDE_RESOLUTION_RANGE=%.2f %.2f\n' % \
                      (self._resolution_low, self._resolution_high))

      xds_inp.close()


      # copy the input file...
      shutil.copyfile(os.path.join(self.get_working_directory(),
                                   'XDS.INP'),
                      os.path.join(self.get_working_directory(),
                                   '%d_DEFPIX.INP' % self.get_xpid()))

      # write the input data files...

      for file_name in self._input_data_files_list:
        src = self._input_data_files[file_name]
        dst = os.path.join(
            self.get_working_directory(), file_name)
        if src != dst:
          shutil.copyfile(src, dst)

      self.start()
      self.close_wait()

      xds_check_version_supported(self.get_all_output())

      # copy the LP file
      shutil.copyfile(os.path.join(self.get_working_directory(),
                                   'DEFPIX.LP'),
                      os.path.join(self.get_working_directory(),
                                   '%d_DEFPIX.LP' % self.get_xpid()))

      # check the resolution asked for is achievable (if set)
      for record in open(os.path.join(self.get_working_directory(),
                                      'DEFPIX.LP')):
        if 'RESOLUTION RANGE RECORDED BY DETECTOR' in record:
          real_high = float(record.split()[-1])
          if self._resolution_high:
            if real_high > self._resolution_high + 0.01:
              Chatter.write(
                  'Warning: resolution limited to %.2f' % \
                  real_high)


      # gather the output files

      for file in self._output_data_files_list:
        self._output_data_files[file] = os.path.join(
          self.get_working_directory(), file)

      return
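
For reference, the XDS.INP that this method writes would look roughly like the following; the geometry keywords emitted by imageset_to_xds are omitted, and the path, data range, and resolution limits are illustrative only:

JOB=DEFPIX
NAME_TEMPLATE_OF_DATA_FRAMES=/data/demo/insulin_1_????.img
DATA_RANGE=1 90
INCLUDE_RESOLUTION_RANGE=40.00 1.80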
Example #59
0
    def _index(self):
        '''Implement the indexer interface.'''

        Citations.cite('mosflm')

        indexer = MosflmIndex()
        indexer.set_working_directory(self.get_working_directory())
        auto_logfiler(indexer)

        from xia2.lib.bits import unique_elements
        _images = unique_elements(self._indxr_images)
        indexer.set_images(_images)
        images_str = ', '.join(map(str, _images))

        cell_str = None
        if self._indxr_input_cell:
            cell_str = '%.2f %.2f %.2f %.2f %.2f %.2f' % \
                        self._indxr_input_cell

        if self._indxr_sweep_name:

            #if len(self._fp_directory) <= 50:
            #dirname = self._fp_directory
            #else:
            #dirname = '...%s' % self._fp_directory[-46:]
            dirname = os.path.dirname(self.get_imageset().get_template())

            Journal.block(
                'autoindexing', self._indxr_sweep_name, 'mosflm', {
                    'images': images_str,
                    'target cell': self._indxr_input_cell,
                    'target lattice': self._indxr_input_lattice,
                    'template': self.get_imageset().get_template(),
                    'directory': dirname
                })

        #task = 'Autoindex from images:'

        #for i in _images:
        #task += ' %s' % self.get_image_name(i)

        #self.set_task(task)

        indexer.set_template(os.path.basename(self.get_template()))
        indexer.set_directory(self.get_directory())

        xsweep = self.get_indexer_sweep()
        if xsweep is not None:
            if xsweep.get_distance() is not None:
                indexer.set_distance(xsweep.get_distance())
            #if self.get_wavelength_prov() == 'user':
            #index.set_wavelength(self.get_wavelength())
            if xsweep.get_beam_centre() is not None:
                indexer.set_beam_centre(xsweep.get_beam_centre())

        if self._indxr_input_cell:
            indexer.set_unit_cell(self._indxr_input_cell)

        if self._indxr_input_lattice is not None:
            spacegroup_number = lattice_to_spacegroup(
                self._indxr_input_lattice)
            indexer.set_space_group_number(spacegroup_number)

        if not self._mosflm_autoindex_thresh:

            try:

                min_peaks = 200

                Debug.write('Aiming for at least %d spots...' % min_peaks)

                thresholds = []

                for i in _images:

                    p = Printpeaks()
                    p.set_working_directory(self.get_working_directory())
                    auto_logfiler(p)
                    p.set_image(self.get_image_name(i))
                    thresh = p.threshold(min_peaks)

                    Debug.write('Autoindex threshold for image %d: %d' % \
                                (i, thresh))

                    thresholds.append(thresh)

                thresh = min(thresholds)
                self._mosflm_autoindex_thresh = thresh

            except Exception as e:
                print(str(e))  # XXX this should disappear!
                Debug.write('Error computing threshold: %s' % str(e))
                Debug.write('Using default of 20.0')
                thresh = 20.0
                # remember the fallback so the retry logic below can scale it
                self._mosflm_autoindex_thresh = thresh

        else:
            thresh = self._mosflm_autoindex_thresh

        Debug.write('Using autoindex threshold: %d' % thresh)

        if self._mosflm_autoindex_sol:
            indexer.set_solution_number(self._mosflm_autoindex_sol)
        indexer.set_threshold(thresh)

        # now forget this to prevent weird things happening later on
        if self._mosflm_autoindex_sol:
            self._mosflm_autoindex_sol = 0

        indexer.run()

        indxr_cell = indexer.get_refined_unit_cell()
        self._indxr_lattice = indexer.get_lattice()
        space_group_number = indexer.get_indexed_space_group_number()
        detector_distance = indexer.get_refined_distance()
        beam_centre = indexer.get_refined_beam_centre()
        mosaic_spreads = indexer.get_mosaic_spreads()

        if min(list(indxr_cell)) < 10.0 and \
           indxr_cell[2] / indxr_cell[0] > 6:

            Debug.write('Unrealistic autoindexing solution: ' +
                        '%.2f %.2f %.2f %.2f %.2f %.2f' % indxr_cell)

            # tweak some parameters and try again...
            self._mosflm_autoindex_thresh *= 1.5
            self.set_indexer_done(False)

            return

        intgr_params = {}

        # look up other possible indexing solutions (not well - in
        # standard settings only!) This is moved earlier as it could
        # result in returning if Mosflm has selected the wrong
        # solution!

        try:
            self._indxr_other_lattice_cell = indexer.get_solutions()

            # Change 27/FEB/08 to support user assigned spacegroups
            if self._indxr_user_input_lattice:
                lattice_to_spacegroup_dict = {
                    'aP': 1,
                    'mP': 3,
                    'mC': 5,
                    'oP': 16,
                    'oC': 20,
                    'oF': 22,
                    'oI': 23,
                    'tP': 75,
                    'tI': 79,
                    'hP': 143,
                    'hR': 146,
                    'cP': 195,
                    'cF': 196,
                    'cI': 197
                }
                # iterate over a copy, since entries may be deleted
                for k in list(self._indxr_other_lattice_cell.keys()):
                    if lattice_to_spacegroup_dict[k] > \
                       lattice_to_spacegroup_dict[self._indxr_input_lattice]:
                        del self._indxr_other_lattice_cell[k]

            # check that the selected unit cell matches - and if
            # not raise a "horrible" exception

            if self._indxr_input_cell:
                assert indxr_cell is not None
                for j in range(6):
                    if math.fabs(self._indxr_input_cell[j] -
                                 indxr_cell[j]) > 2.0:
                        Chatter.write('Mosflm autoindexing did not select ' +
                                      'correct (target) unit cell')
                        raise RuntimeError(
                            'something horrible happened in indexing')

        except RuntimeError as e:
            # check if mosflm rejected a solution we have it
            if 'horribl' in str(e):
                # ok it did - time to break out the big guns...
                if not self._indxr_input_cell:
                    raise RuntimeError(
                        'error in solution selection when not preset')

                # XXX FIXME
                self._mosflm_autoindex_sol = _get_indexing_solution_number(
                    indexer.get_all_output(), self._indxr_input_cell,
                    self._indxr_input_lattice)

                # set the fact that we are not done...
                self.set_indexer_done(False)

                # and return - hopefully this will restart everything
                return
            else:
                raise

        if len(mosaic_spreads) == 0:
            # then consider setting it to a default value...
            # equal to the oscillation width (a good guess)
            phi_width = self.get_phi_width()
            Chatter.write(
                'Mosaic estimation failed, so guessing at %4.2f' % \
                phi_width)
            # only consider this if we have thus far no idea on the
            # mosaic spread...
            mosaic_spreads.append(phi_width)

        intgr_params['raster'] = indexer.get_raster()

        intgr_params['separation'] = indexer.get_separation()

        self._indxr_resolution_estimate = indexer.get_resolution_estimate()

        # compute mosaic as mean(mosaic_spreads)

        self._indxr_mosaic = sum(mosaic_spreads) / len(mosaic_spreads)

        self._indxr_payload['mosflm_integration_parameters'] = intgr_params

        with open(os.path.join(self.get_working_directory(),
                               'xiaindex.mat'), 'r') as matfile:
            self._indxr_payload['mosflm_orientation_matrix'] = \
                matfile.readlines()

        import copy
        from dxtbx.model.detector_helpers import set_mosflm_beam_centre
        from xia2.Wrappers.Mosflm.AutoindexHelpers import set_distance
        from xia2.Wrappers.Mosflm.AutoindexHelpers import crystal_model_from_mosflm_mat
        from cctbx import sgtbx, uctbx

        # update the beam centre (i.e. shift the origin of the detector)
        detector = copy.deepcopy(self.get_detector())
        beam = copy.deepcopy(self.get_beam())
        set_mosflm_beam_centre(detector, beam, beam_centre)
        if detector_distance is not None:
            set_distance(detector, detector_distance)

        # make a dxtbx crystal_model object from the mosflm matrix
        space_group = sgtbx.space_group_info(number=space_group_number).group()
        crystal_model = crystal_model_from_mosflm_mat(
            self._indxr_payload['mosflm_orientation_matrix'],
            unit_cell=uctbx.unit_cell(tuple(indxr_cell)),
            space_group=space_group)

        # construct an experiment_list
        from dxtbx.model import Experiment, ExperimentList
        experiment = Experiment(beam=beam,
                                detector=detector,
                                goniometer=self.get_goniometer(),
                                scan=self.get_scan(),
                                crystal=crystal_model)

        experiment_list = ExperimentList([experiment])
        self.set_indexer_experiment_list(experiment_list)
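
The lattice filtering in the try-block above keeps only candidate lattices whose implied symmetry does not exceed the user-assigned one. The same logic as a standalone sketch (the function name is hypothetical; the mapping is the dictionary tabulated above):

def filter_other_lattices(solutions, user_lattice, lattice_to_spacegroup_dict):
    # Keep only lattices of equal or lower symmetry than the user's choice.
    limit = lattice_to_spacegroup_dict[user_lattice]
    return {k: v for k, v in solutions.items()
            if lattice_to_spacegroup_dict[k] <= limit}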
Example #60
0
  def _mosflm_integrate(self):
    '''Perform the actual integration, based on the results of the
    cell refinement or indexing (they have the equivalent form.)'''

    refinr = self.get_integrater_refiner()

    if not refinr.get_refiner_payload('mosflm_orientation_matrix'):
      raise RuntimeError('unexpected situation in indexing')

    lattice = refinr.get_refiner_lattice()
    spacegroup_number = lattice_to_spacegroup(lattice)
    mosaic = refinr.get_refiner_payload('mosaic')
    beam = refinr.get_refiner_payload('beam')
    distance = refinr.get_refiner_payload('distance')
    matrix = refinr.get_refiner_payload('mosflm_orientation_matrix')

    integration_params = refinr.get_refiner_payload(
      'mosflm_integration_parameters')

    if integration_params:
      if 'separation' in integration_params:
        self.set_integrater_parameter(
          'mosflm', 'separation',
          '%s %s' % tuple(integration_params['separation']))
      if 'raster' in integration_params:
        self.set_integrater_parameter(
          'mosflm', 'raster',
          '%d %d %d %d %d' % tuple(integration_params['raster']))

    refinr.set_refiner_payload('mosflm_integration_parameters', None)

    with open(os.path.join(self.get_working_directory(),
                           'xiaintegrate.mat'), 'w') as f:
      for m in matrix:
        f.write(m)

    # then start the integration
    integrater = MosflmIntegrate()
    integrater.set_working_directory(self.get_working_directory())
    auto_logfiler(integrater)

    integrater.set_refine_profiles(self._mosflm_refine_profiles)

    pname, xname, dname = self.get_integrater_project_info()

    if pname is not None and xname is not None and dname is not None:
      Debug.write('Harvesting: %s/%s/%s' % (pname, xname, dname))
      harvest_dir = self.get_working_directory()
      # harvest file name will be %s.mosflm_run_start_end % dname
      temp_dname = '%s_%s' % \
                   (dname, self.get_integrater_sweep_name())
      integrater.set_pname_xname_dname(pname, xname, temp_dname)

    integrater.set_template(os.path.basename(self.get_template()))
    integrater.set_directory(self.get_directory())

    # check for ice - and if so, exclude (ranges taken from
    # XDS documentation)
    if self.get_integrater_ice() != 0:
      Debug.write('Excluding ice rings')
      integrater.set_exclude_ice(True)

    # exclude specified resolution ranges
    if len(self.get_integrater_excluded_regions()) != 0:
      regions = self.get_integrater_excluded_regions()
      Debug.write('Excluding regions: %r' % regions)
      integrater.set_exclude_regions(regions)

    mask = standard_mask(self.get_detector())
    for m in mask:
      integrater.add_instruction(m)

    integrater.set_input_mat_file('xiaintegrate.mat')

    integrater.set_beam_centre(beam)
    integrater.set_distance(distance)
    integrater.set_space_group_number(spacegroup_number)
    integrater.set_mosaic(mosaic)

    if self.get_wavelength_prov() == 'user':
      integrater.set_wavelength(self.get_wavelength())

    parameters = self.get_integrater_parameters('mosflm')
    integrater.update_parameters(parameters)

    if self._mosflm_gain:
      integrater.set_gain(self._mosflm_gain)

    # check for resolution limits
    if self._intgr_reso_high > 0.0:
      integrater.set_d_min(self._intgr_reso_high)
    if self._intgr_reso_low:
      integrater.set_d_max(self._intgr_reso_low)

    if PhilIndex.params.general.backstop_mask:
      from xia2.Toolkit.BackstopMask import BackstopMask
      mask = BackstopMask(PhilIndex.params.general.backstop_mask)
      mask = mask.calculate_mask_mosflm(self.get_header())
      integrater.set_mask(mask)

    detector = self.get_detector()
    detector_width, detector_height = detector[0].get_image_size_mm()

    lim_x = 0.5 * detector_width
    lim_y = 0.5 * detector_height

    Debug.write('Scanner limits: %.1f %.1f' % (lim_x, lim_y))
    integrater.set_limits(lim_x, lim_y)

    integrater.set_fix_mosaic(self._mosflm_postref_fix_mosaic)
    offset = self.get_frame_offset()

    integrater.set_image_range(
      (self._intgr_wedge[0] - offset, self._intgr_wedge[1] - offset))

    try:
      integrater.run()
    except RuntimeError as e:
      if 'integration failed: reason unknown' in str(e):
        Chatter.write('Mosflm has failed in integration')
        message = 'The input was:\n\n'
        for record in integrater.get_all_input():
          message += '  %s' % record
        Chatter.write(message)
      raise
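
The scanner limits passed to Mosflm above are half the panel's physical dimensions in mm. A minimal standalone sketch of the same computation, assuming a single-panel dxtbx detector (the function name is hypothetical):

def mosflm_scanner_limits(detector):
    # Half-width and half-height in mm, matching the LIMITS setup above
    # (assumes a single-panel detector).
    width_mm, height_mm = detector[0].get_image_size_mm()
    return 0.5 * width_mm, 0.5 * height_mm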