Example #1
 def set_xinfo(self, xinfo):
     logger.debug(60 * "-")
     logger.debug("XINFO file: %s" % xinfo)
     with open(xinfo, "rU") as fh:
         logger.debug(fh.read().strip())
     logger.debug(60 * "-")
     self._xinfo = XProject(xinfo)
Example #2
 def set_xinfo(self, xinfo):
     Debug.write(60 * '-')
     Debug.write('XINFO file: %s' % xinfo)
     with open(xinfo, 'rU') as fh:
         Debug.write(fh.read().strip())
     Debug.write(60 * '-')
     self._xinfo = XProject(xinfo)
Example #3
 def set_xinfo(self, xinfo):
     Debug.write(60 * "-")
     Debug.write("XINFO file: %s" % xinfo)
     with open(xinfo, "rU") as fh:
         Debug.write(fh.read().strip())
     Debug.write(60 * "-")
     self._xinfo = XProject(xinfo)
Example #4
def run():
    assert os.path.exists("xia2.json")
    from xia2.Schema.XProject import XProject

    xinfo = XProject.from_json(filename="xia2.json")
    Chatter.write(xinfo.get_output())
    write_citations()
Example #5
def find_scale_dir():
  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')
  crystals = xinfo.get_crystals()
  assert len(crystals) == 1
  crystal = next(crystals.itervalues())
  return os.path.join(crystal.get_name(), 'scale')
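Note that `crystals.itervalues()` in Example 5 is Python 2 only; later examples (e.g. Example 8) use `next(iter(crystals.values()))` instead. A hedged Python 3 rendering of the same helper, assuming the XProject API shown in these examples:

import os

def find_scale_dir_py3():
    # Python 3 variant of Example 5: itervalues() replaced by values();
    # otherwise identical, and it still assumes xia2 is importable and a
    # single-crystal xia2.json sits in the current directory.
    assert os.path.exists("xia2.json")
    from xia2.Schema.XProject import XProject

    xinfo = XProject.from_json(filename="xia2.json")
    crystals = xinfo.get_crystals()
    assert len(crystals) == 1
    crystal = next(iter(crystals.values()))
    return os.path.join(crystal.get_name(), "scale")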
Example #6
def run():
    if os.path.exists("xia2-working.phil"):
        sys.argv.append("xia2-working.phil")
    try:
        check_environment()
    except Exception as e:
        traceback.print_exc(file=open("xia2.error", "w"))
        Chatter.write('Status: error "%s"' % str(e))

    # print the version
    Chatter.write(Version)
    Citations.cite("xia2")

    start_time = time.time()

    assert os.path.exists("xia2.json")
    from xia2.Schema.XProject import XProject

    xinfo = XProject.from_json(filename="xia2.json")

    crystals = xinfo.get_crystals()
    for crystal_id, crystal in crystals.iteritems():
        # cwd = os.path.abspath(os.curdir)
        from libtbx import Auto

        scale_dir = PhilIndex.params.xia2.settings.scale.directory
        if scale_dir is Auto:
            scale_dir = "scale"
            i = 0
            while os.path.exists(os.path.join(crystal.get_name(), scale_dir)):
                i += 1
                scale_dir = "scale%i" % i
            PhilIndex.params.xia2.settings.scale.directory = scale_dir
        working_directory = Environment.generate_directory(
            [crystal.get_name(), scale_dir])
        # os.chdir(working_directory)

        crystals[crystal_id]._scaler = None  # reset scaler

        scaler = crystal._get_scaler()
        Chatter.write(xinfo.get_output())
        crystal.serialize()

    duration = time.time() - start_time

    # write out the time taken in a human readable way
    Chatter.write("Processing took %s" %
                  time.strftime("%Hh %Mm %Ss", time.gmtime(duration)))

    # delete all of the temporary mtz files...
    cleanup()

    write_citations()

    xinfo.as_json(filename="xia2.json")

    Environment.cleanup()
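The scale-directory logic near the top of Example 6 (and repeated in Examples 9 and 15) picks the first unused name in the sequence scale, scale1, scale2, ... under the crystal directory. A minimal self-contained sketch of just that numbering rule, with the crystal name as a plain string argument:

import os

def next_scale_dir(crystal_name):
    # Mirror of the loop in Example 6: reuse "scale" if it does not exist
    # yet, otherwise fall back to scale1, scale2, ...
    scale_dir = "scale"
    i = 0
    while os.path.exists(os.path.join(crystal_name, scale_dir)):
        i += 1
        scale_dir = "scale%i" % i
    return scale_dir

print(next_scale_dir("DEFAULT"))  # "scale" when run in a clean directory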
Example #7
File: html.py Project: hainm/xia2
def run():
  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')

  rst = get_xproject_rst(xinfo)

  with open('xia2.html', 'wb') as f:
    print >> f, rst2html(rst)
Example #8
def ispyb_object():
    assert os.path.exists("xia2.json")
    assert os.path.exists("xia2.txt")
    command_line = ""
    for record in open("xia2.txt"):
        if record.startswith("Command line:"):
            command_line = record.replace("Command line:", "").strip()
    xinfo = XProject.from_json(filename="xia2.json")
    crystals = xinfo.get_crystals()
    assert len(crystals) == 1
    crystal = next(iter(crystals.values()))
    ispyb_hdl = ISPyBXmlHandler(xinfo)
    ispyb_hdl.add_xcrystal(crystal)
    return ispyb_hdl.json_object(command_line=command_line)
Example #9
def run():
    if os.path.exists("xia2-working.phil"):
        sys.argv.append("xia2-working.phil")
    try:
        check_environment()
    except Exception as e:
        with open("xia2-error.txt", "w") as fh:
            traceback.print_exc(file=fh)
        logger.error('Status: error "%s"', str(e))

    # print the version
    logger.info(Version)
    Citations.cite("xia2")

    start_time = time.time()

    assert os.path.exists("xia2.json")
    from xia2.Schema.XProject import XProject

    xinfo = XProject.from_json(filename="xia2.json")

    with cleanup(xinfo.path):
        crystals = xinfo.get_crystals()
        for crystal_id, crystal in crystals.items():
            scale_dir = PhilIndex.params.xia2.settings.scale.directory
            if scale_dir is Auto:
                scale_dir = "scale"
                i = 0
                while os.path.exists(os.path.join(crystal.get_name(), scale_dir)):
                    i += 1
                    scale_dir = "scale%i" % i
                PhilIndex.params.xia2.settings.scale.directory = scale_dir

            # reset scaler
            crystals[crystal_id]._scaler = None
            crystal._get_scaler()

            logger.info(xinfo.get_output())
            crystal.serialize()

        duration = time.time() - start_time

        # write out the time taken in a human readable way
        logger.info(
            "Processing took %s", time.strftime("%Hh %Mm %Ss", time.gmtime(duration))
        )

        write_citations()

        xinfo.as_json(filename="xia2.json")
Example #10
def ispyb_object():
    from xia2.Interfaces.ISPyB.ISPyBXmlHandler import ISPyBXmlHandler

    assert os.path.exists("xia2.json")
    assert os.path.exists("xia2.txt")
    command_line = ""
    for record in open("xia2.txt"):
        if record.startswith("Command line:"):
            command_line = record.replace("Command line:", "").strip()
    xinfo = XProject.from_json(filename="xia2.json")
    crystals = xinfo.get_crystals()
    assert len(crystals) == 1
    crystal = next(crystals.itervalues())
    ISPyBXmlHandler.add_xcrystal(crystal)
    return ISPyBXmlHandler.json_object(command_line=command_line)
Example #11
def ispyb_xml(xml_out):
  from xia2.Interfaces.ISPyB.ISPyBXmlHandler import ISPyBXmlHandler
  import os
  assert os.path.exists('xia2.json')
  assert os.path.exists('xia2.txt')
  command_line = ''
  for record in open('xia2.txt'):
    if record.startswith('Command line:'):
      command_line = record.replace('Command line:', '').strip()
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')
  crystals = xinfo.get_crystals()
  assert len(crystals) == 1
  crystal = next(crystals.itervalues())
  ISPyBXmlHandler.add_xcrystal(crystal)
  ISPyBXmlHandler.write_xml(xml_out, command_line)
Example #12
def ispyb_xml(xml_out):
    assert os.path.exists("xia2.json")
    assert os.path.exists("xia2.txt")
    assert os.path.exists("xia2-working.phil")
    command_line = ""
    for record in open("xia2.txt"):
        if record.startswith("Command line:"):
            command_line = record.replace("Command line:", "").strip()
    with open("xia2-working.phil", "rb") as f:
        working_phil = iotbx.phil.parse(f.read())
    xinfo = XProject.from_json(filename="xia2.json")
    crystals = xinfo.get_crystals()
    assert len(crystals) == 1
    crystal = next(crystals.itervalues())
    ISPyBXmlHandler.add_xcrystal(crystal)
    ISPyBXmlHandler.write_xml(xml_out, command_line, working_phil=working_phil)
Example #13
def ispyb_xml(xml_out):
  from xia2.Interfaces.ISPyB.ISPyBXmlHandler import ISPyBXmlHandler
  import os
  assert os.path.exists('xia2.json')
  assert os.path.exists('xia2.txt')
  command_line = ''
  for record in open('xia2.txt'):
    if record.startswith('Command line:'):
      command_line = record.replace('Command line:', '').strip()
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')
  crystals = xinfo.get_crystals()
  assert len(crystals) == 1
  crystal = next(crystals.itervalues())
  ISPyBXmlHandler.add_xcrystal(crystal)
  ISPyBXmlHandler.write_xml(xml_out, command_line)
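Examples 8 and 10-13 all recover the original command line the same way: scan xia2.txt and keep the last line that starts with "Command line:". A self-contained sketch of that scan against a toy in-memory log (the file contents here are invented for illustration):

import io

# Toy stand-in for xia2.txt; the argument string is made up.
xia2_txt = io.StringIO(
    "some banner text\n"
    "Command line: xia2 image=/data/example_0001.cbf\n"
)

command_line = ""
for record in xia2_txt:
    if record.startswith("Command line:"):
        command_line = record.replace("Command line:", "").strip()
print(command_line)  # -> xia2 image=/data/example_0001.cbf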
Example #14
def load_sweeps_with_common_indexing():
  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')

  import dials # required for gaussian_rs warning
  from xia2.Wrappers.Dials.Reindex import Reindex
  Citations.cite('dials')

  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  import cPickle as pickle
  crystals = xinfo.get_crystals()
  assert len(crystals) == 1
  crystal = next(crystals.itervalues())
  working_directory = Environment.generate_directory([crystal.get_name(), 'analysis'])
  os.chdir(working_directory)

  scaler = crystal._get_scaler()

  epoch_to_batches = {}
  epoch_to_integrated_intensities = {}
  epoch_to_sweep_name = {}

  # Aimless only
  epochs = scaler._sweep_handler.get_epochs()

  reference_cell = None
  reference_lattice = None
  reference_vectors = None
  reference_wavelength = None

  # Reindex each sweep to same setting
  all_miller_indices = flex.miller_index()
  all_two_thetas = flex.double()

  for epoch in epochs:
    si = scaler._sweep_handler.get_sweep_information(epoch)
    Chatter.smallbanner(si.get_sweep_name(), True)
    Debug.smallbanner(si.get_sweep_name(), True)

    intgr = si.get_integrater()
    experiments_filename = intgr.get_integrated_experiments()
    reflections_filename = intgr.get_integrated_reflections()
    refiner = intgr.get_integrater_refiner()
    Debug.write('experiment: %s' % experiments_filename)
    Debug.write('reflection: %s' % reflections_filename)

    # Use setting of first sweep as reference
    if reference_vectors is None:
      reference_vectors = experiments_filename

    # Assume that all sweeps have the same lattice system
    if reference_lattice is None:
      reference_lattice = refiner.get_refiner_lattice()
    else:
      assert reference_lattice == refiner.get_refiner_lattice()
    Debug.write("lattice: %s" % refiner.get_refiner_lattice())

    # Read .json file for sweep
    db = ExperimentListFactory.from_json_file(experiments_filename)

    # Assume that each file only contains a single experiment
    assert (len(db) == 1)
    db = db[0]

    # Get beam vector
    s0 = db.beam.get_unit_s0()

    # Use the unit cell of the first sweep as reference
    if reference_cell is None:
      reference_cell = db.crystal.get_unit_cell()
      Debug.write("Reference cell: %s" % str(reference_cell))

    dials_reindex = Reindex()
    dials_reindex.set_working_directory(working_directory)
    dials_reindex.set_cb_op("auto")
    dials_reindex.set_reference_filename(reference_vectors)
    dials_reindex.set_experiments_filename(experiments_filename)
    dials_reindex.set_indexed_filename(reflections_filename)
    auto_logfiler(dials_reindex)
    dials_reindex.run()

    # Assume that all data are collected at same wavelength
    if reference_wavelength is None:
      reference_wavelength = intgr.get_wavelength()
    else:
      assert abs(reference_wavelength - intgr.get_wavelength()) < 0.01
    Debug.write("wavelength: %f A" % intgr.get_wavelength())
    Debug.write("distance: %f mm" % intgr.get_distance())

    # Get integrated reflection data
    import dials
    with open(dials_reindex.get_reindexed_reflections_filename(), 'rb') as fh:
      reflections = pickle.load(fh)

    selection = reflections.get_flags(reflections.flags.used_in_refinement)
    Chatter.write("Found %d reflections used in refinement (out of %d entries)" % (selection.count(True), len(reflections['miller_index'])))
    reflections = reflections.select(selection)

    # Filter bad reflections
    selection = reflections['intensity.sum.variance'] <= 0
    if selection.count(True) > 0:
      reflections.del_selected(selection)
      print 'Removing %d reflections with negative variance' % \
        selection.count(True)

    if 'intensity.prf.variance' in reflections:
      selection = reflections['intensity.prf.variance'] <= 0
      if selection.count(True) > 0:
        reflections.del_selected(selection)
        print 'Removing %d profile reflections with negative variance' % \
          selection.count(True)

    # Find the observed 2theta angles
    miller_indices = flex.miller_index()
    two_thetas_obs = flex.double()
    for pixel, panel, hkl in zip(reflections['xyzobs.px.value'], reflections['panel'], reflections['miller_index']):
      assert hkl != (0, 0, 0)
      two_thetas_obs.append(db.detector[panel].get_two_theta_at_pixel(s0, pixel[0:2]))
      miller_indices.append(hkl)

    # Convert observed 2theta angles to degrees
    two_thetas_obs = two_thetas_obs * 180 / 3.14159265359
    Chatter.write("Remaining %d reflections are in 2theta range %.3f - %.3f deg" % (len(miller_indices), min(two_thetas_obs), max(two_thetas_obs)))

    all_miller_indices.extend(miller_indices)
    all_two_thetas.extend(two_thetas_obs)

  return all_miller_indices, all_two_thetas, reference_cell, reference_lattice, reference_wavelength
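A small aside on Example 14: the hand-written factor 180 / 3.14159265359 applied to the 2theta values is just a radians-to-degrees conversion, so math.degrees() gives the same result without the truncated constant:

import math

two_theta_rad = 0.25  # arbitrary example value
print(two_theta_rad * 180 / 3.14159265359)  # as written in Example 14
print(math.degrees(two_theta_rad))          # equivalent, to the shown precision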
Example #15
  try:
    check_environment()
    check()
  except exceptions.Exception, e:
    traceback.print_exc(file = open('xia2.error', 'w'))
    Chatter.write('Status: error "%s"' % str(e))

  # print the version
  Chatter.write(Version)
  Citations.cite('xia2')

  start_time = time.time()

  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')

  crystals = xinfo.get_crystals()
  for crystal_id, crystal in crystals.iteritems():
    #cwd = os.path.abspath(os.curdir)
    from libtbx import Auto
    scale_dir = PhilIndex.params.xia2.settings.scale.directory
    if scale_dir is Auto:
      scale_dir = 'scale'
      i = 0
      while os.path.exists(os.path.join(crystal.get_name(), scale_dir)):
        i += 1
        scale_dir = 'scale%i' %i
      PhilIndex.params.xia2.settings.scale.directory = scale_dir
    working_directory = Environment.generate_directory(
      [crystal.get_name(), scale_dir])
Example #16
def reconstruct_rogues(params):
    assert os.path.exists("xia2.json")
    from xia2.Schema.XProject import XProject

    xinfo = XProject.from_json(filename="xia2.json")

    from dxtbx.model.experiment_list import ExperimentListFactory
    import six.moves.cPickle as pickle
    import dials  # because WARNING:root:No profile class gaussian_rs registered

    crystals = xinfo.get_crystals()
    assert len(crystals) == 1

    for xname in crystals:
        crystal = crystals[xname]

    scaler = crystal._get_scaler()

    epochs = scaler._sweep_handler.get_epochs()

    rogues = os.path.join(scaler.get_working_directory(), xname, "scale",
                          "ROGUES")

    rogue_reflections = munch_rogues(rogues)

    batched_reflections = {}

    for epoch in epochs:
        si = scaler._sweep_handler.get_sweep_information(epoch)
        intgr = si.get_integrater()
        experiments = ExperimentListFactory.from_json_file(
            intgr.get_integrated_experiments())
        with open(intgr.get_integrated_reflections(), "rb") as fh:
            reflections = pickle.load(fh)
        batched_reflections[si.get_batch_range()] = (
            experiments,
            reflections,
            si.get_sweep_name(),
        )

    # - look up reflection in reflection list, get bounding box
    # - pull pixels given from image set, flatten these, write out

    from dials.array_family import flex
    from annlib_ext import AnnAdaptor as ann_adaptor

    reflections_run = {}
    for run in batched_reflections:
        reflections_run[run] = []

    for rogue in rogue_reflections:
        b = rogue[0]
        for run in batched_reflections:
            if b >= run[0] and b <= run[1]:
                reflections_run[run].append(rogue)
                break

    for run_no, run in enumerate(reflections_run):
        experiment = batched_reflections[run][0]
        reflections = batched_reflections[run][1]
        name = batched_reflections[run][2]
        rogues = reflections_run[run]
        reference = flex.double()
        scan = experiment.scans()[0]
        images = experiment.imagesets()[0]
        for xyz in reflections["xyzcal.px"]:
            reference.append(xyz[0])
            reference.append(xyz[1])
            reference.append(xyz[2])

        search = flex.double()
        for rogue in rogues:
            search.append(rogue[1])
            search.append(rogue[2])
            search.append(scan.get_array_index_from_angle(rogue[3]))

        ann = ann_adaptor(data=reference, dim=3, k=1)
        ann.query(search)

        keep = flex.bool(len(reflections), False)

        for j, rogue in enumerate(rogues):
            keep[ann.nn[j]] = True

        reflections = reflections.select(keep == True)

        if params.extract:
            reflections["shoebox"] = flex.shoebox(reflections["panel"],
                                                  reflections["bbox"],
                                                  allocate=True)
            reflections.extract_shoeboxes(images, verbose=False)

        if len(reflections_run) > 1:
            output = params.output.reflections.replace(".refl",
                                                       "-%s.refl" % name)
            print("Extracted %d rogue reflections for %s to %s" %
                  (len(reflections), name, output))
            reflections.as_pickle(output)
        else:
            output = params.output.reflections
            print("Extracted %d rogue reflections to %s" %
                  (len(reflections), output))
            reflections.as_pickle(output)
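Example 16 assigns each ROGUES entry to a sweep by checking which sweep's (first, last) batch range contains the rogue's batch number. A self-contained sketch of that lookup with toy batch ranges:

# Toy batch ranges and rogue batch numbers, invented for illustration.
batched = {(1, 100): "SWEEP1", (101, 200): "SWEEP2"}
rogue_batches = [5, 150, 101]

for b in rogue_batches:
    for (first, last), name in batched.items():
        if first <= b <= last:
            print(b, "->", name)
            break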
Example #17
def xia2_main(stop_after=None):
  '''Actually process something...'''

  Citations.cite('xia2')

  # print versions of related software
  from dials.util.version import dials_version
  Chatter.write(dials_version())

  start_time = time.time()

  CommandLine = get_command_line()
  start_dir = Flags.get_starting_directory()

  # check that something useful has been assigned for processing...
  xtals = CommandLine.get_xinfo().get_crystals()

  no_images = True

  for name in xtals.keys():
    xtal = xtals[name]

    if not xtal.get_all_image_names():

      Chatter.write('-----------------------------------' + \
                    '-' * len(name))
      Chatter.write('| No images assigned for crystal %s |' % name)
      Chatter.write('-----------------------------------' + '-' \
                    * len(name))
    else:
      no_images = False

  args = []

  from xia2.Handlers.Phil import PhilIndex
  params = PhilIndex.get_python_object()
  mp_params = params.xia2.settings.multiprocessing
  njob = mp_params.njob

  from libtbx import group_args

  xinfo = CommandLine.get_xinfo()

  if os.path.exists('xia2.json'):
    from xia2.Schema.XProject import XProject
    xinfo_new = xinfo
    xinfo = XProject.from_json(filename='xia2.json')

    crystals = xinfo.get_crystals()
    crystals_new = xinfo_new.get_crystals()
    for crystal_id in crystals_new.keys():
      if crystal_id not in crystals:
        crystals[crystal_id] = crystals_new[crystal_id]
        continue
      crystals[crystal_id]._scaler = None # reset scaler
      for wavelength_id in crystals_new[crystal_id].get_wavelength_names():
        wavelength_new = crystals_new[crystal_id].get_xwavelength(wavelength_id)
        if wavelength_id not in crystals[crystal_id].get_wavelength_names():
          crystals[crystal_id].add_wavelength(
            crystals_new[crystal_id].get_xwavelength(wavelength_new))
          continue
        wavelength = crystals[crystal_id].get_xwavelength(wavelength_id)
        sweeps_new = wavelength_new.get_sweeps()
        sweeps = wavelength.get_sweeps()
        sweep_names = [s.get_name() for s in sweeps]
        sweep_keys = [
          (s.get_directory(), s.get_template(), s.get_image_range())
          for s in sweeps]
        for sweep in sweeps_new:
          if ((sweep.get_directory(), sweep.get_template(),
               sweep.get_image_range()) not in sweep_keys):
            if sweep.get_name() in sweep_names:
              i = 1
              while 'SWEEEP%i' %i in sweep_names:
                i += 1
              sweep._name = 'SWEEP%i' %i
              break
            wavelength.add_sweep(
              name=sweep.get_name(),
              directory=sweep.get_directory(),
              image=sweep.get_image(),
              beam=sweep.get_beam_centre(),
              reversephi=sweep.get_reversephi(),
              distance=sweep.get_distance(),
              gain=sweep.get_gain(),
              dmin=sweep.get_resolution_high(),
              dmax=sweep.get_resolution_low(),
              polarization=sweep.get_polarization(),
              frames_to_process=sweep.get_frames_to_process(),
              user_lattice=sweep.get_user_lattice(),
              user_cell=sweep.get_user_cell(),
              epoch=sweep._epoch,
              ice=sweep._ice,
              excluded_regions=sweep._excluded_regions,
            )
            sweep_names.append(sweep.get_name())

  crystals = xinfo.get_crystals()

  failover = params.xia2.settings.failover

  if njob > 1:
    driver_type = mp_params.type
    command_line_args = CommandLine.get_argv()[1:]
    for crystal_id in crystals.keys():
      for wavelength_id in crystals[crystal_id].get_wavelength_names():
        wavelength = crystals[crystal_id].get_xwavelength(wavelength_id)
        sweeps = wavelength.get_sweeps()
        for sweep in sweeps:
          sweep._get_indexer()
          sweep._get_refiner()
          sweep._get_integrater()
          args.append((
            group_args(
              driver_type=driver_type,
              stop_after=stop_after,
              failover=failover,
              command_line_args=command_line_args,
              nproc=mp_params.nproc,
              crystal_id=crystal_id,
              wavelength_id=wavelength_id,
              sweep_id=sweep.get_name(),
              ),))

    from xia2.Driver.DriverFactory import DriverFactory
    default_driver_type = DriverFactory.get_driver_type()

    # run every nth job on the current computer (no need to submit to qsub)
    for i_job, arg in enumerate(args):
      if (i_job % njob) == 0:
        arg[0].driver_type = default_driver_type

    if mp_params.type == "qsub":
      method = "sge"
    else:
      method = "multiprocessing"
    nproc = mp_params.nproc
    qsub_command = mp_params.qsub_command
    if not qsub_command:
      qsub_command = 'qsub'
    qsub_command = '%s -V -cwd -pe smp %d' %(qsub_command, nproc)

    from libtbx import easy_mp
    results = easy_mp.parallel_map(
      process_one_sweep, args, processes=njob,
      #method=method,
      method="multiprocessing",
      qsub_command=qsub_command,
      preserve_order=True,
      preserve_exception_message=True)

    # Hack to update sweep with the serialized indexers/refiners/integraters
    i_sweep = 0
    for crystal_id in crystals.keys():
      for wavelength_id in crystals[crystal_id].get_wavelength_names():
        wavelength = crystals[crystal_id].get_xwavelength(wavelength_id)
        remove_sweeps = []
        sweeps = wavelength.get_sweeps()
        for sweep in sweeps:
          success, output, xsweep_dict = results[i_sweep]
          assert xsweep_dict is not None
          if output is not None:
            Chatter.write(output)
          if not success:
            Chatter.write('Sweep failed: removing %s' %sweep.get_name())
            remove_sweeps.append(sweep)
          else:
            Chatter.write('Loading sweep: %s' % sweep.get_name())
            from xia2.Schema.XSweep import XSweep
            new_sweep = XSweep.from_dict(xsweep_dict)
            sweep._indexer = new_sweep._indexer
            sweep._refiner = new_sweep._refiner
            sweep._integrater = new_sweep._integrater
          i_sweep += 1
        for sweep in remove_sweeps:
          wavelength.remove_sweep(sweep)
          sample = sweep.get_xsample()
          sample.remove_sweep(sweep)

  else:
    for crystal_id in crystals.keys():
      for wavelength_id in crystals[crystal_id].get_wavelength_names():
        wavelength = crystals[crystal_id].get_xwavelength(wavelength_id)
        remove_sweeps = []
        sweeps = wavelength.get_sweeps()
        for sweep in sweeps:
          try:
            if stop_after == 'index':
              sweep.get_indexer_cell()
            else:
              sweep.get_integrater_intensities()
            sweep.serialize()
          except Exception, e:
            if failover:
              Chatter.write('Processing sweep %s failed: %s' % \
                            (sweep.get_name(), str(e)))
              remove_sweeps.append(sweep)
            else:
              raise
        for sweep in remove_sweeps:
          wavelength.remove_sweep(sweep)
          sample = sweep.get_xsample()
          sample.remove_sweep(sweep)
Example #18
def multi_crystal_analysis(stop_after=None):
    '''Actually process something...'''

    assert os.path.exists('xia2.json')
    from xia2.Schema.XProject import XProject
    xinfo = XProject.from_json(filename='xia2.json')

    crystals = xinfo.get_crystals()
    for crystal_id, crystal in crystals.iteritems():
        cwd = os.path.abspath(os.curdir)
        working_directory = Environment.generate_directory(
            [crystal.get_name(), 'analysis'])
        os.chdir(working_directory)

        scaler = crystal._get_scaler()

        #epoch_to_si = {}
        epoch_to_batches = {}
        epoch_to_integrated_intensities = {}
        epoch_to_sweep_name = {}
        epoch_to_experiments_filename = {}
        epoch_to_experiments = {}
        sweep_name_to_epoch = {}
        epoch_to_first_image = {}

        from dxtbx.serialize import load
        try:
            epochs = scaler._sweep_information.keys()
            for epoch in epochs:
                si = scaler._sweep_information[epoch]
                epoch_to_batches[epoch] = si['batches']
                epoch_to_integrated_intensities[epoch] = si[
                    'corrected_intensities']
                epoch_to_sweep_name[epoch] = si['sname']
                sweep_name_to_epoch[si['name']] = epoch
                intgr = si['integrater']
                epoch_to_experiments_filename[
                    epoch] = intgr.get_integrated_experiments()
                epoch_to_experiments[epoch] = load.experiment_list(
                    intgr.get_integrated_experiments())

        except AttributeError:
            epochs = scaler._sweep_handler.get_epochs()
            for epoch in epochs:
                si = scaler._sweep_handler.get_sweep_information(epoch)
                epoch_to_batches[epoch] = si.get_batches()
                epoch_to_integrated_intensities[epoch] = si.get_reflections()
                epoch_to_sweep_name[epoch] = si.get_sweep_name()
                sweep_name_to_epoch[si.get_sweep_name()] = epoch
                intgr = si.get_integrater()
                epoch_to_experiments_filename[
                    epoch] = intgr.get_integrated_experiments()
                epoch_to_experiments[epoch] = load.experiment_list(
                    intgr.get_integrated_experiments())

        from xia2.Wrappers.Dials.StereographicProjection import StereographicProjection
        sp_json_files = {}
        for hkl in ((1, 0, 0), (0, 1, 0), (0, 0, 1)):
            sp = StereographicProjection()
            auto_logfiler(sp)
            sp.set_working_directory(working_directory)
            for experiments in epoch_to_experiments_filename.values():
                sp.add_experiments(experiments)
            sp.set_hkl(hkl)
            sp.run()
            sp_json_files[hkl] = sp.get_json_filename()

        unmerged_mtz = scaler.get_scaled_reflections(
            'mtz_unmerged').values()[0]
        from iotbx.reflection_file_reader import any_reflection_file
        reader = any_reflection_file(unmerged_mtz)

        from xia2.Wrappers.XIA.PlotMultiplicity import PlotMultiplicity
        mult_json_files = {}
        for axis in ('h', 'k', 'l'):
            pm = PlotMultiplicity()
            auto_logfiler(pm)
            pm.set_working_directory(working_directory)
            pm.set_mtz_filename(unmerged_mtz)
            pm.set_slice_axis(axis)
            pm.set_show_missing(True)
            pm.run()
            mult_json_files[axis] = pm.get_json_filename()

        intensities = None
        batches = None
        assert reader.file_type() == 'ccp4_mtz'
        arrays = reader.as_miller_arrays(merge_equivalents=False)
        for ma in arrays:
            if ma.info().labels == ['BATCH']:
                batches = ma
            elif ma.info().labels == ['I', 'SIGI']:
                intensities = ma
            elif ma.info().labels == ['I(+)', 'SIGI(+)', 'I(-)', 'SIGI(-)']:
                intensities = ma

        from xia2.Wrappers.CCP4.Blend import Blend
        hand_blender = Blend()
        hand_blender.set_working_directory(working_directory)
        auto_logfiler(hand_blender)
        Citations.cite('blend')

        from xia2.Handlers.Environment import which
        Rscript_binary = which('Rscript', debug=False)
        if Rscript_binary is None:
            Chatter.write('Skipping BLEND analysis: Rscript not available')
        else:
            for epoch in epochs:
                hand_blender.add_hklin(epoch_to_integrated_intensities[epoch],
                                       label=epoch_to_sweep_name[epoch])
            hand_blender.analysis()
            Chatter.write("Dendrogram saved to: %s" %
                          hand_blender.get_dendrogram_file())
            analysis = hand_blender.get_analysis()
            summary = hand_blender.get_summary()
            clusters = hand_blender.get_clusters()

            ddict = hand_blender.plot_dendrogram()

            phil_files_dir = 'phil_files'
            if not os.path.exists(phil_files_dir):
                os.makedirs(phil_files_dir)

            rows = []
            headers = [
                'Cluster', 'Datasets', 'Multiplicity', 'Completeness', 'LCV',
                'aLCV', 'Average unit cell'
            ]
            completeness = flex.double()
            average_unit_cell_params = []
            for i, cluster in clusters.iteritems():
                print i
                sel_cluster = flex.bool(batches.size(), False)
                cluster_uc_params = [flex.double() for k in range(6)]
                for j in cluster['dataset_ids']:
                    epoch = epochs[j - 1]
                    batch_start, batch_end = epoch_to_batches[epoch]
                    sel_cluster |= ((batches.data() >= batch_start) &
                                    (batches.data() <= batch_end))
                    expts = epoch_to_experiments.get(epoch)
                    assert expts is not None, (epoch)
                    assert len(expts) == 1, len(expts)
                    expt = expts[0]
                    uc_params = expt.crystal.get_unit_cell().parameters()
                    for k in range(6):
                        cluster_uc_params[k].append(uc_params[k])
                intensities_cluster = intensities.select(sel_cluster)
                merging = intensities_cluster.merge_equivalents()
                merged_intensities = merging.array()
                multiplicities = merging.redundancies()
                completeness.append(merged_intensities.completeness())
                average_unit_cell_params.append(
                    tuple(flex.mean(p) for p in cluster_uc_params))
                dataset_ids = cluster['dataset_ids']

                assert min(dataset_ids) > 0
                with open(
                        os.path.join(phil_files_dir,
                                     'blend_cluster_%i_images.phil' % i),
                        'wb') as f:
                    sweep_names = [
                        hand_blender._labels[dataset_id - 1]
                        for dataset_id in dataset_ids
                    ]
                    for sweep_name in sweep_names:
                        expts = epoch_to_experiments.get(
                            sweep_name_to_epoch.get(sweep_name))
                        assert expts is not None, (
                            sweep_name, sweep_name_to_epoch.get(sweep_name))
                        assert len(expts) == 1, len(expts)
                        expt = expts[0]
                        print >> f, 'xia2.settings.input.image = %s' % expt.imageset.get_path(
                            0)

                rows.append([
                    '%i' % i,
                    ' '.join(['%i'] * len(dataset_ids)) % tuple(dataset_ids),
                    '%.1f' % flex.mean(multiplicities.data().as_double()),
                    '%.2f' % completeness[-1],
                    '%.2f' % cluster['lcv'],
                    '%.2f' % cluster['alcv'],
                    '%g %g %g %g %g %g' % average_unit_cell_params[-1]
                ])

            # sort table by completeness
            perm = flex.sort_permutation(completeness)
            rows = [rows[i] for i in perm]

            print
            print 'Unit cell clustering summary:'
            print tabulate(rows, headers, tablefmt='rst')
            print

            blend_html = tabulate(rows, headers, tablefmt='html').replace(
                '<table>',
                '<table class="table table-hover table-condensed">').replace(
                    '<td>', '<td style="text-align: right;">')

    # XXX what about multiple wavelengths?
    with open('batches.phil', 'wb') as f:
        try:
            for epoch, si in scaler._sweep_information.iteritems():
                print >> f, "batch {"
                print >> f, "  id=%s" % si['sname']
                print >> f, "  range=%i,%i" % tuple(si['batches'])
                print >> f, "}"
        except AttributeError:
            for epoch in scaler._sweep_handler.get_epochs():
                si = scaler._sweep_handler.get_sweep_information(epoch)
                print >> f, "batch {"
                print >> f, "  id=%s" % si.get_sweep_name()
                print >> f, "  range=%i,%i" % tuple(si.get_batches())
                print >> f, "}"

    from xia2.Wrappers.XIA.MultiCrystalAnalysis import MultiCrystalAnalysis
    mca = MultiCrystalAnalysis()
    auto_logfiler(mca, extra="MultiCrystalAnalysis")
    mca.add_command_line_args([
        scaler.get_scaled_reflections(format="sca_unmerged").values()[0],
        "unit_cell=%s %s %s %s %s %s" % tuple(scaler.get_scaler_cell()),
        "batches.phil"
    ])
    mca.set_working_directory(working_directory)
    mca.run()

    intensity_clusters = mca.get_clusters()
    rows = []
    headers = [
        'Cluster', 'Datasets', 'Multiplicity', 'Completeness', 'Height',
        'Average unit cell'
    ]
    completeness = flex.double()
    average_unit_cell_params = []
    for i, cluster in intensity_clusters.iteritems():
        sel_cluster = flex.bool(batches.size(), False)
        cluster_uc_params = [flex.double() for k in range(6)]
        for j in cluster['datasets']:
            epoch = epochs[j - 1]
            batch_start, batch_end = epoch_to_batches[epoch]
            sel_cluster |= ((batches.data() >= batch_start) &
                            (batches.data() <= batch_end))
            expts = epoch_to_experiments.get(epoch)
            assert expts is not None, (epoch)
            assert len(expts) == 1, len(expts)
            expt = expts[0]
            uc_params = expt.crystal.get_unit_cell().parameters()
            for k in range(6):
                cluster_uc_params[k].append(uc_params[k])
        intensities_cluster = intensities.select(sel_cluster)
        merging = intensities_cluster.merge_equivalents()
        merged_intensities = merging.array()
        multiplicities = merging.redundancies()
        completeness.append(merged_intensities.completeness())
        average_unit_cell_params.append(
            tuple(flex.mean(p) for p in cluster_uc_params))
        dataset_ids = cluster['datasets']

        rows.append([
            '%i' % int(i),
            ' '.join(['%i'] * len(dataset_ids)) % tuple(dataset_ids),
            '%.1f' % flex.mean(multiplicities.data().as_double()),
            '%.2f' % completeness[-1],
            '%.2f' % cluster['height'],
            '%g %g %g %g %g %g' % average_unit_cell_params[-1]
        ])

    # sort table by completeness
    perm = flex.sort_permutation(completeness)
    rows = [rows[i] for i in perm]

    print 'Intensity clustering summary:'
    print tabulate(rows, headers, tablefmt='rst')
    print

    intensity_clustering_html = tabulate(
        rows, headers, tablefmt='html').replace(
            '<table>',
            '<table class="table table-hover table-condensed">').replace(
                '<td>', '<td style="text-align: right;">')

    import json

    json_data = {}
    if ddict is not None:
        from xia2.Modules.MultiCrystalAnalysis import scipy_dendrogram_to_plotly_json
        json_data['blend_dendrogram'] = scipy_dendrogram_to_plotly_json(ddict)
    else:
        json_data['blend_dendrogram'] = {'data': [], 'layout': {}}

    json_data['intensity_clustering'] = mca.get_dict()
    del json_data['intensity_clustering']['clusters']

    for hkl in ((1, 0, 0), (0, 1, 0), (0, 0, 1)):
        with open(sp_json_files[hkl], 'rb') as f:
            d = json.load(f)
            d['layout'][
                'title'] = 'Stereographic projection (hkl=%i%i%i)' % hkl
            json_data['stereographic_projection_%s%s%s' % hkl] = d

    for axis in ('h', 'k', 'l'):
        with open(mult_json_files[axis], 'rb') as f:
            json_data['multiplicity_%s' % axis] = json.load(f)

    json_str = json.dumps(json_data, indent=2)

    javascript = ['var graphs = %s' % (json_str)]
    javascript.append(
        'Plotly.newPlot(blend_dendrogram, graphs.blend_dendrogram.data, graphs.blend_dendrogram.layout);'
    )
    javascript.append(
        'Plotly.newPlot(intensity_clustering, graphs.intensity_clustering.data, graphs.intensity_clustering.layout);'
    )
    for hkl in ((1, 0, 0), (0, 1, 0), (0, 0, 1)):
        javascript.append(
            'Plotly.newPlot(stereographic_projection_%(hkl)s, graphs.stereographic_projection_%(hkl)s.data, graphs.stereographic_projection_%(hkl)s.layout);'
            % ({
                'hkl': "%s%s%s" % hkl
            }))
    for axis in ('h', 'k', 'l'):
        javascript.append(
            'Plotly.newPlot(multiplicity_%(axis)s, graphs.multiplicity_%(axis)s.data, graphs.multiplicity_%(axis)s.layout);'
            % ({
                'axis': axis
            }))

    html_header = '''
<head>

<!-- Plotly.js -->
<script src="https://cdn.plot.ly/plotly-latest.min.js"></script>

<meta name="viewport" content="width=device-width, initial-scale=1" charset="UTF-8">
<link rel="stylesheet" href="http://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/css/bootstrap.min.css"/>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js"></script>
<script src="http://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/js/bootstrap.min.js"></script>
<style type="text/css">

body {
  /*font-family: Helmet, Freesans, Helvetica, Arial, sans-serif;*/
  margin: 8px;
  min-width: 240px;
  margin-left: 5%;
  margin-right: 5%;
}

.plot {
  float: left;
  width: 1200px;
  height: 800px;
  margin-bottom: 20px;
}

.square_plot {
  float: left;
  width: 800px;
  height: 800px;
  margin-bottom: 20px;
}

</style>

</head>

'''

    html_body = '''

<body>

<div class="page-header">
  <h1>Multi-crystal analysis report</h1>
</div>

<div class="panel-group">

  <div class="panel panel-default">
    <div class="panel-heading" data-toggle="collapse" href="#collapse_multiplicity">
      <h4 class="panel-title">
        <a>Multiplicity plots</a>
      </h4>
    </div>
    <div id="collapse_multiplicity" class="panel-collapse collapse">
      <div class="panel-body">
        <div class="col-xs-12 col-sm-12 col-md-12 square_plot" id="multiplicity_h"></div>
        <div class="col-xs-12 col-sm-12 col-md-12 square_plot" id="multiplicity_k"></div>
        <div class="col-xs-12 col-sm-12 col-md-12 square_plot" id="multiplicity_l"></div>
      </div>
    </div>
  </div>

  <div class="panel panel-default">
    <div class="panel-heading" data-toggle="collapse" href="#collapse_stereographic_projection">
      <h4 class="panel-title">
        <a>Stereographic projections</a>
      </h4>
    </div>
    <div id="collapse_stereographic_projection" class="panel-collapse collapse">
      <div class="panel-body">
        <div class="col-xs-12 col-sm-12 col-md-12 square_plot" id="stereographic_projection_100"></div>
        <div class="col-xs-12 col-sm-12 col-md-12 square_plot" id="stereographic_projection_010"></div>
        <div class="col-xs-12 col-sm-12 col-md-12 square_plot" id="stereographic_projection_001"></div>
      </div>
    </div>
  </div>

  <div class="panel panel-default">
    <div class="panel-heading" data-toggle="collapse" href="#collapse_cell">
      <h4 class="panel-title">
        <a>Unit cell clustering</a>
      </h4>
    </div>
    <div id="collapse_cell" class="panel-collapse collapse">
      <div class="panel-body">
        <div class="col-xs-12 col-sm-12 col-md-12 plot" id="blend_dendrogram"></div>
        <div class="table-responsive" style="width: 800px">
          %(blend_html)s
        </div>
      </div>
    </div>
  </div>

  <div class="panel panel-default">
    <div class="panel-heading" data-toggle="collapse" href="#collapse_intensity">
      <h4 class="panel-title">
        <a>Intensity clustering</a>
      </h4>
    </div>
    <div id="collapse_intensity" class="panel-collapse collapse">
      <div class="panel-body">
        <div class="col-xs-12 col-sm-12 col-md-12 plot" id="intensity_clustering" style="height:1000px"></div>
        <div class="table-responsive" style="width: 800px">
          %(intensity_clustering_html)s
        </div>
      </div>
    </div>
  </div>
</div>

<script>
%(script)s
</script>
</body>
    ''' % {
        'script': '\n'.join(javascript),
        'blend_html': blend_html,
        'intensity_clustering_html': intensity_clustering_html
    }

    html = '\n'.join([html_header, html_body])

    print "Writing html report to: %s" % 'multi-crystal-report.html'
    with open('multi-crystal-report.html', 'wb') as f:
        print >> f, html.encode('ascii', 'xmlcharrefreplace')

    write_citations()

    Environment.cleanup()

    return
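Examples 18 and 20 turn their summary tables into HTML with `tabulate` and then attach Bootstrap classes by plain string replacement. That trick in isolation (requires the tabulate package; the row values are invented):

from tabulate import tabulate

rows = [["1", "1 2", "3.5", "0.98"]]
headers = ["Cluster", "Datasets", "Multiplicity", "Completeness"]
html = tabulate(rows, headers, tablefmt="html").replace(
    "<table>", '<table class="table table-hover table-condensed">').replace(
    "<td>", '<td style="text-align: right;">')
print(html)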
Example #19
def process_one_sweep(args):
    assert len(args) == 1
    args = args[0]
    # stop_after = args.stop_after

    command_line_args = args.command_line_args
    nproc = args.nproc
    crystal_id = args.crystal_id
    wavelength_id = args.wavelength_id
    sweep_id = args.sweep_id
    failover = args.failover
    driver_type = args.driver_type

    default_driver_type = DriverFactory.get_driver_type()
    DriverFactory.set_driver_type(driver_type)

    curdir = os.path.abspath(os.curdir)

    if "-xinfo" in command_line_args:
        idx = command_line_args.index("-xinfo")
        del command_line_args[idx + 1]
        del command_line_args[idx]

    xia2_integrate = XIA2Integrate()

    # import tempfile
    # tmpdir = tempfile.mkdtemp(dir=curdir)

    tmpdir = os.path.join(curdir, str(uuid.uuid4()))
    os.makedirs(tmpdir)
    xia2_integrate.set_working_directory(tmpdir)
    xia2_integrate.add_command_line_args(args.command_line_args)
    xia2_integrate.set_phil_file(os.path.join(curdir, "xia2-working.phil"))
    xia2_integrate.add_command_line_args(["sweep.id=%s" % sweep_id])
    xia2_integrate.set_nproc(nproc)
    xia2_integrate.set_njob(1)
    xia2_integrate.set_mp_mode("serial")
    auto_logfiler(xia2_integrate)

    sweep_tmp_dir = os.path.join(tmpdir, crystal_id, wavelength_id, sweep_id)
    sweep_target_dir = os.path.join(curdir, crystal_id, wavelength_id,
                                    sweep_id)

    output = None
    success = False
    xsweep_dict = None

    try:
        xia2_integrate.run()
        output = get_sweep_output_only(xia2_integrate.get_all_output())
        success = True
    except Exception as e:
        logger.warning("Processing sweep %s failed: %s", sweep_id, str(e))
        if not failover:
            raise
    finally:
        from xia2.Schema.XProject import XProject

        xia2_json = os.path.join(tmpdir, "xia2.json")
        json_files = glob.glob(os.path.join(sweep_tmp_dir, "*", "*.json"))
        json_files.extend(glob.glob(os.path.join(sweep_tmp_dir, "*",
                                                 "*.expt")))
        if os.path.exists(xia2_json):
            json_files.append(xia2_json)

        import fileinput

        for line in fileinput.FileInput(files=json_files, inplace=1):
            line = line.replace(sweep_tmp_dir, sweep_target_dir)
            print(line)

        if os.path.exists(xia2_json):
            new_json = os.path.join(curdir, "xia2-%s.json" % sweep_id)

            shutil.copyfile(xia2_json, new_json)

        move_output_folder(sweep_tmp_dir, sweep_target_dir)

        if success:
            xinfo = XProject.from_json(new_json)
            xcryst = list(xinfo.get_crystals().values())[0]
            xsweep = xcryst.get_xwavelength(wavelength_id).get_sweeps()[0]
            xsweep_dict = xsweep.to_dict()

        shutil.rmtree(tmpdir, ignore_errors=True)
        if os.path.exists(tmpdir):
            shutil.rmtree(tmpdir, ignore_errors=True)
        DriverFactory.set_driver_type(default_driver_type)
        return success, output, xsweep_dict
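Example 19's `process_one_sweep(args)` expects each job as a one-element tuple, which matches how Examples 17 and 24 build their job lists (`args.append((group_args(...),))`) before calling `easy_mp.parallel_map(process_one_sweep, args, ...)`. A minimal self-contained sketch of that calling convention, with `types.SimpleNamespace` standing in for libtbx's `group_args` and a trivial worker body:

from types import SimpleNamespace

def process_one_sweep_stub(args):
    # Each job is a 1-tuple wrapping a single argument object, as in Example 19.
    assert len(args) == 1
    job = args[0]
    return True, "processed %s" % job.sweep_id, {}

jobs = [(SimpleNamespace(sweep_id="SWEEP1", nproc=1),),
        (SimpleNamespace(sweep_id="SWEEP2", nproc=1),)]
# Serial stand-in for easy_mp.parallel_map(process_one_sweep, jobs, ...)
results = [process_one_sweep_stub(job) for job in jobs]
print(results)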
Example #20
def multi_crystal_analysis(stop_after=None):
  '''Actually process something...'''

  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')

  crystals = xinfo.get_crystals()
  for crystal_id, crystal in crystals.iteritems():
    cwd = os.path.abspath(os.curdir)
    working_directory = Environment.generate_directory(
      [crystal.get_name(), 'analysis'])
    os.chdir(working_directory)

    from xia2.Wrappers.CCP4.Blend import Blend

    from xia2.lib.bits import auto_logfiler
    hand_blender = Blend()
    hand_blender.set_working_directory(working_directory)
    auto_logfiler(hand_blender)
    Citations.cite('blend')

    scaler = crystal._get_scaler()

    #epoch_to_si = {}
    epoch_to_batches = {}
    epoch_to_integrated_intensities = {}
    epoch_to_sweep_name = {}

    try:
      epochs = scaler._sweep_information.keys()
      for epoch in epochs:
        si = scaler._sweep_information[epoch]
        epoch_to_batches[epoch] = si['batches']
        epoch_to_integrated_intensities[epoch] = si['corrected_intensities']
        epoch_to_sweep_name[epoch] = si['sname']
    except AttributeError, e:
      epochs = scaler._sweep_handler.get_epochs()
      for epoch in epochs:
        si = scaler._sweep_handler.get_sweep_information(epoch)
        epoch_to_batches[epoch] = si.get_batches()
        epoch_to_integrated_intensities[epoch] = si.get_reflections()
        epoch_to_sweep_name[epoch] = si.get_sweep_name()

    unmerged_mtz = scaler.get_scaled_reflections('mtz_unmerged').values()[0]
    from iotbx.reflection_file_reader import any_reflection_file
    reader = any_reflection_file(unmerged_mtz)

    intensities = None
    batches = None
    assert reader.file_type() == 'ccp4_mtz'
    arrays = reader.as_miller_arrays(merge_equivalents=False)
    for ma in arrays:
      if ma.info().labels == ['BATCH']:
        batches = ma
      elif ma.info().labels == ['I', 'SIGI']:
        intensities = ma
      elif ma.info().labels == ['I(+)', 'SIGI(+)', 'I(-)', 'SIGI(-)']:
        intensities = ma

    from xia2.Handlers.Environment import which
    Rscript_binary = which('Rscript', debug=False)
    if Rscript_binary is None:
      Chatter.write('Skipping BLEND analysis: Rscript not available')
    else:
      for epoch in epochs:
        hand_blender.add_hklin(epoch_to_integrated_intensities[epoch],
                               label=epoch_to_sweep_name[epoch])
      hand_blender.analysis()
      Chatter.write("Dendrogram saved to: %s" %hand_blender.get_dendrogram_file())
      analysis = hand_blender.get_analysis()
      summary = hand_blender.get_summary()
      clusters = hand_blender.get_clusters()

      linkage_matrix = hand_blender.get_linkage_matrix()
      ddict = hand_blender.plot_dendrogram()

      rows = []
      headers = ['Cluster', 'Datasets', 'Multiplicity', 'Completeness', 'LCV', 'aLCV']
      completeness = flex.double()
      for i, cluster in clusters.iteritems():
        sel_cluster = flex.bool(batches.size(), False)
        for j in cluster['dataset_ids']:
          batch_start, batch_end = epoch_to_batches[epochs[j-1]]
          sel_cluster |= (
            (batches.data() >= batch_start) & (batches.data() <= batch_end))
        intensities_cluster = intensities.select(sel_cluster)
        merging = intensities_cluster.merge_equivalents()
        merged_intensities = merging.array()
        multiplicities = merging.redundancies()
        completeness.append(merged_intensities.completeness())
        dataset_ids = cluster['dataset_ids']

        rows.append(
          ['%i' %i, ' '.join(['%i'] * len(dataset_ids)) %tuple(dataset_ids),
           '%.1f' %flex.mean(multiplicities.data().as_double()),
           '%.2f' %completeness[-1],
           '%.2f' %cluster['lcv'], '%.2f' %cluster['alcv']])

      # sort table by completeness
      perm = flex.sort_permutation(completeness)
      rows = [rows[i] for i in perm]

      print
      print 'Unit cell clustering summary:'
      print tabulate(rows, headers, tablefmt='rst')
      print

      blend_html = tabulate(rows, headers, tablefmt='html').replace(
        '<table>', '<table class="table table-hover table-condensed">').replace(
    '<td>', '<td style="text-align: right;">')
Example #21
File: print.py Project: hainm/xia2
def run():
  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')
  Chatter.write(xinfo.get_output())
  write_citations()
Example #22
def zocalo_object():
    assert os.path.exists("xia2.json")
    xinfo = XProject.from_json(filename="xia2.json")
    crystals = xinfo.get_crystals()
    assert len(crystals) == 1
    return xia2.Interfaces.ISPyB.xia2_to_json_object(list(crystals.values()))
Example #23
File: html.py Project: xia2/xia2
def run():
  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')
  generate_xia2_html(xinfo)
Example #24
def xia2_main(stop_after=None):
    """Actually process something..."""
    Citations.cite("xia2")

    # print versions of related software
    logger.info(dials_version())

    ccp4_version = get_ccp4_version()
    if ccp4_version:
        logger.info("CCP4 %s", ccp4_version)

    start_time = time.time()

    CommandLine = get_command_line()

    # check that something useful has been assigned for processing...
    xtals = CommandLine.get_xinfo().get_crystals()

    for name, xtal in xtals.items():
        if not xtal.get_all_image_names():
            logger.info("-----------------------------------" +
                        "-" * len(name))
            logger.info("| No images assigned for crystal %s |", name)
            logger.info("-----------------------------------" +
                        "-" * len(name))

    from xia2.Handlers.Phil import PhilIndex

    params = PhilIndex.get_python_object()
    mp_params = params.xia2.settings.multiprocessing
    njob = mp_params.njob

    xinfo = CommandLine.get_xinfo()
    logger.info("Project directory: %s", xinfo.path)

    if (params.xia2.settings.developmental.continue_from_previous_job
            and os.path.exists("xia2.json")):
        logger.debug("==== Starting from existing xia2.json ====")
        xinfo_new = xinfo
        xinfo = XProject.from_json(filename="xia2.json")

        crystals = xinfo.get_crystals()
        crystals_new = xinfo_new.get_crystals()
        for crystal_id in crystals_new:
            if crystal_id not in crystals:
                crystals[crystal_id] = crystals_new[crystal_id]
                continue
            crystals[crystal_id]._scaler = None  # reset scaler
            for wavelength_id in crystals_new[crystal_id].get_wavelength_names(
            ):
                wavelength_new = crystals_new[crystal_id].get_xwavelength(
                    wavelength_id)
                if wavelength_id not in crystals[
                        crystal_id].get_wavelength_names():
                    crystals[crystal_id].add_wavelength(
                        crystals_new[crystal_id].get_xwavelength(
                            wavelength_new))
                    continue
                wavelength = crystals[crystal_id].get_xwavelength(
                    wavelength_id)
                sweeps_new = wavelength_new.get_sweeps()
                sweeps = wavelength.get_sweeps()
                sweep_names = {s.get_name() for s in sweeps}
                sweep_keys = {(s.get_directory(), s.get_template(),
                               s.get_image_range())
                              for s in sweeps}
                for sweep in sweeps_new:
                    if (
                            sweep.get_directory(),
                            sweep.get_template(),
                            sweep.get_image_range(),
                    ) not in sweep_keys:
                        if sweep.get_name() in sweep_names:
                            i = 1
                            while "SWEEEP%i" % i in sweep_names:
                                i += 1
                            sweep._name = "SWEEP%i" % i
                            break
                        wavelength.add_sweep(
                            name=sweep.get_name(),
                            sample=sweep.sample,
                            directory=sweep.get_directory(),
                            image=sweep.get_image(),
                            beam=sweep.get_beam_centre(),
                            reversephi=sweep.get_reversephi(),
                            distance=sweep.get_distance(),
                            gain=sweep.get_gain(),
                            dmin=sweep.get_resolution_high(),
                            dmax=sweep.get_resolution_low(),
                            polarization=sweep.get_polarization(),
                            frames_to_process=sweep.get_frames_to_process(),
                            user_lattice=sweep.get_user_lattice(),
                            user_cell=sweep.get_user_cell(),
                            epoch=sweep._epoch,
                            ice=sweep._ice,
                            excluded_regions=sweep._excluded_regions,
                        )
                        sweep_names.add(sweep.get_name())

    crystals = xinfo.get_crystals()

    failover = params.xia2.settings.failover

    with cleanup(xinfo.path):
        if mp_params.mode == "parallel" and njob > 1:
            driver_type = mp_params.type
            command_line_args = CommandLine.get_argv()[1:]
            jobs = []
            for crystal_id in crystals:
                for wavelength_id in crystals[crystal_id].get_wavelength_names(
                ):
                    wavelength = crystals[crystal_id].get_xwavelength(
                        wavelength_id)
                    sweeps = wavelength.get_sweeps()
                    for sweep in sweeps:
                        sweep._get_indexer()
                        sweep._get_refiner()
                        sweep._get_integrater()
                        jobs.append((group_args(
                            driver_type=driver_type,
                            stop_after=stop_after,
                            failover=failover,
                            command_line_args=command_line_args,
                            nproc=mp_params.nproc,
                            crystal_id=crystal_id,
                            wavelength_id=wavelength_id,
                            sweep_id=sweep.get_name(),
                        ), ))

            from xia2.Driver.DriverFactory import DriverFactory

            default_driver_type = DriverFactory.get_driver_type()

            # run every nth job on the current computer (no need to submit to qsub)
            for i_job, arg in enumerate(jobs):
                if (i_job % njob) == 0:
                    arg[0].driver_type = default_driver_type

            nproc = mp_params.nproc
            qsub_command = mp_params.qsub_command or "qsub"
            qsub_command = "%s -V -cwd -pe smp %d" % (qsub_command, nproc)

            from libtbx import easy_mp

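            # One process_one_sweep call per job; preserve_order=True keeps the results
            # list in the same order as the jobs list, which the loop below relies on.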
            results = easy_mp.parallel_map(
                process_one_sweep,
                jobs,
                processes=njob,
                method="multiprocessing",
                qsub_command=qsub_command,
                preserve_order=True,
                preserve_exception_message=True,
            )

            # Hack to update sweep with the serialized indexers/refiners/integraters
            i_sweep = 0
            for crystal_id in crystals:
                for wavelength_id in crystals[crystal_id].get_wavelength_names(
                ):
                    wavelength = crystals[crystal_id].get_xwavelength(
                        wavelength_id)
                    remove_sweeps = []
                    sweeps = wavelength.get_sweeps()
                    for sweep in sweeps:
                        success, output, xsweep_dict = results[i_sweep]
                        if output is not None:
                            logger.info(output)
                        if not success:
                            logger.info("Sweep failed: removing %s",
                                        sweep.get_name())
                            remove_sweeps.append(sweep)
                        else:
                            assert xsweep_dict is not None
                            logger.info("Loading sweep: %s", sweep.get_name())
                            new_sweep = XSweep.from_dict(xsweep_dict)
                            sweep._indexer = new_sweep._indexer
                            sweep._refiner = new_sweep._refiner
                            sweep._integrater = new_sweep._integrater
                        i_sweep += 1
                    for sweep in remove_sweeps:
                        wavelength.remove_sweep(sweep)
                        sample = sweep.sample
                        sample.remove_sweep(sweep)

        else:
            for crystal_id in list(crystals.keys()):
                for wavelength_id in crystals[crystal_id].get_wavelength_names(
                ):
                    wavelength = crystals[crystal_id].get_xwavelength(
                        wavelength_id)
                    remove_sweeps = []
                    sweeps = wavelength.get_sweeps()
                    for sweep in sweeps:
                        from dials.command_line.show import show_experiments
                        from dxtbx.model.experiment_list import ExperimentListFactory

                        logger.debug(sweep.get_name())
                        logger.debug(
                            show_experiments(
                                ExperimentListFactory.
                                from_imageset_and_crystal(
                                    sweep.get_imageset(), None)))
                        Citations.cite("dials")
                        try:
                            if stop_after == "index":
                                sweep.get_indexer_cell()
                            else:
                                sweep.get_integrater_intensities()
                            sweep.serialize()
                        except Exception as e:
                            if failover:
                                logger.info(
                                    "Processing sweep %s failed: %s",
                                    sweep.get_name(),
                                    str(e),
                                )
                                remove_sweeps.append(sweep)
                            else:
                                raise
                    for sweep in remove_sweeps:
                        wavelength.remove_sweep(sweep)
                        sample = sweep.sample
                        sample.remove_sweep(sweep)

        # save intermediate xia2.json file in case scaling step fails
        xinfo.as_json(filename="xia2.json")

        if stop_after not in ("index", "integrate"):
            logger.info(xinfo.get_output())

        for crystal in list(crystals.values()):
            crystal.serialize()

        # save final xia2.json file in case report generation fails
        xinfo.as_json(filename="xia2.json")

        if stop_after not in ("index", "integrate"):
            # and the summary file
            with open("xia2-summary.dat", "w") as fh:
                for record in xinfo.summarise():
                    fh.write("%s\n" % record)

            # this import appears to overwrite the initial command-line Phil
            # overrides, so defer it to this point; see https://github.com/xia2/xia2/issues/150
            from xia2.command_line.html import generate_xia2_html

            if params.xia2.settings.small_molecule:
                params.xia2.settings.report.xtriage_analysis = False
                params.xia2.settings.report.include_radiation_damage = False

            with xia2.Driver.timing.record_step("xia2.report"):
                generate_xia2_html(xinfo,
                                   filename="xia2.html",
                                   params=params.xia2.settings.report)

        duration = time.time() - start_time

        # write out the time taken in a human readable way
        logger.info("Processing took %s",
                    time.strftime("%Hh %Mm %Ss", time.gmtime(duration)))

        write_citations()
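
The entry points above all walk the same object hierarchy: an XProject contains XCrystals, each crystal contains XWavelengths, and each wavelength contains XSweeps. A minimal sketch of reloading a finished project and walking that hierarchy, assuming a completed run has already written xia2.json (illustrative only, using the same accessors as the examples):

import os

from xia2.Schema.XProject import XProject

# Assumes xia2.json was written by a previous xia2 run in the current directory.
assert os.path.exists("xia2.json")
xinfo = XProject.from_json(filename="xia2.json")

for crystal_id, crystal in xinfo.get_crystals().items():
    for wavelength_id in crystal.get_wavelength_names():
        wavelength = crystal.get_xwavelength(wavelength_id)
        for sweep in wavelength.get_sweeps():
            print(crystal_id, wavelength_id, sweep.get_name())
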
Ejemplo n.º 25
0
def run(args):
    assert os.path.exists("xia2.json")
    from xia2.Schema.XProject import XProject

    xinfo = XProject.from_json(filename="xia2.json")
    generate_xia2_html(xinfo, args=args)
Ejemplo n.º 26
0
def exercise_serialization(dials_data, tmp_dir):
    base_path = pathlib.Path(tmp_dir)
    template = dials_data("insulin").join("insulin_1_###.img").strpath

    from xia2.Modules.Indexer.DialsIndexer import DialsIndexer
    from xia2.Modules.Refiner.DialsRefiner import DialsRefiner
    from xia2.Modules.Integrater.DialsIntegrater import DialsIntegrater
    from xia2.Modules.Scaler.CCP4ScalerA import CCP4ScalerA

    from dxtbx.model.experiment_list import ExperimentListTemplateImporter

    importer = ExperimentListTemplateImporter([template])
    experiments = importer.experiments
    imageset = experiments.imagesets()[0]

    from xia2.Schema.XProject import XProject
    from xia2.Schema.XCrystal import XCrystal
    from xia2.Schema.XWavelength import XWavelength
    from xia2.Schema.XSweep import XSweep
    from xia2.Schema.XSample import XSample

    proj = XProject(base_path=base_path)
    proj._name = "PROJ1"
    cryst = XCrystal("CRYST1", proj)
    wav = XWavelength("WAVE1", cryst, wavelength=0.98)
    samp = XSample("X1", cryst)
    cryst.add_wavelength(wav)
    cryst.set_ha_info({"atom": "S"})
    cryst.add_sample(samp)
    directory, image = os.path.split(imageset.get_path(1))
    sweep = wav.add_sweep(name="SWEEP1", sample=samp, directory=directory, image=image)
    samp.add_sweep(sweep)

    from dxtbx.serialize.load import _decode_dict

    indexer = DialsIndexer()
    indexer.set_working_directory(tmp_dir)
    indexer.add_indexer_imageset(imageset)
    indexer.set_indexer_sweep(sweep)
    sweep._indexer = indexer

    refiner = DialsRefiner()
    refiner.set_working_directory(tmp_dir)
    refiner.add_refiner_indexer(sweep.get_epoch(1), indexer)
    refiner.add_refiner_sweep(sweep)
    sweep._refiner = refiner

    integrater = DialsIntegrater()
    integrater.set_output_format("hkl")
    integrater.set_working_directory(tmp_dir)
    integrater.setup_from_image(imageset.get_path(1))
    integrater.set_integrater_refiner(refiner)
    # integrater.set_integrater_indexer(indexer)
    integrater.set_integrater_sweep(sweep)
    integrater.set_integrater_epoch(sweep.get_epoch(1))
    integrater.set_integrater_sweep_name(sweep.get_name())
    integrater.set_integrater_project_info(
        cryst.get_name(), wav.get_name(), sweep.get_name()
    )
    sweep._integrater = integrater

    scaler = CCP4ScalerA(base_path=base_path)
    scaler.add_scaler_integrater(integrater)
    scaler.set_scaler_xcrystal(cryst)
    scaler.set_scaler_project_info(cryst.get_name(), wav.get_name())
    scaler._scalr_xcrystal = cryst
    cryst._scaler = scaler

    proj.add_crystal(cryst)

    s_dict = sweep.to_dict()
    s_str = json.dumps(s_dict, ensure_ascii=True)
    s_dict = json.loads(s_str, object_hook=_decode_dict)
    xsweep = XSweep.from_dict(s_dict)
    assert xsweep

    w_dict = wav.to_dict()
    w_str = json.dumps(w_dict, ensure_ascii=True)
    w_dict = json.loads(w_str, object_hook=_decode_dict)
    xwav = XWavelength.from_dict(w_dict)
    assert xwav.get_sweeps()[0].get_wavelength() is xwav

    c_dict = cryst.to_dict()
    c_str = json.dumps(c_dict, ensure_ascii=True)
    c_dict = json.loads(c_str, object_hook=_decode_dict)
    xcryst = XCrystal.from_dict(c_dict)
    assert (
        xcryst.get_xwavelength(xcryst.get_wavelength_names()[0]).get_crystal() is xcryst
    )

    p_dict = proj.to_dict()
    p_str = json.dumps(p_dict, ensure_ascii=True)
    p_dict = json.loads(p_str, object_hook=_decode_dict)
    xproj = XProject.from_dict(p_dict)
    assert xproj.path == base_path
    assert list(xproj.get_crystals().values())[0].get_project() is xproj
    assert list(xproj.get_crystals().values())[0]._scaler._base_path == base_path

    json_str = proj.as_json()
    xproj = XProject.from_json(string=json_str)
    assert xproj.path == base_path
    assert list(xproj.get_crystals().values())[0].get_project() is xproj
    print(xproj.get_output())
    print("\n".join(xproj.summarise()))
    json_str = xproj.as_json()
    xproj = XProject.from_json(string=json_str)
    assert xproj.path == base_path
    # Test that we can serialize to json and back again
    xproj = XProject.from_json(string=xproj.as_json())
    assert xproj.path == base_path
    xcryst = list(xproj.get_crystals().values())[0]
    assert xcryst.get_project() is xproj
    intgr = xcryst._get_integraters()[0]
    assert intgr.get_integrater_finish_done()
    assert (
        xcryst._get_scaler()
        ._sweep_handler.get_sweep_information(intgr.get_integrater_epoch())
        .get_integrater()
        is intgr
    )

    print(xproj.get_output())
    print("\n".join(xproj.summarise()))
Ejemplo n.º 27
0
def reconstruct_peabox(params):
    assert os.path.exists('xia2.json')
    from xia2.Schema.XProject import XProject
    xinfo = XProject.from_json(filename='xia2.json')

    from dxtbx.model.experiment_list import ExperimentListFactory
    import cPickle as pickle
    import dials  # because WARNING:root:No profile class gaussian_rs registered
    import math  # needed for math.log10 in the shoebox display below
    from dials.array_family import flex
    crystals = xinfo.get_crystals()
    assert len(crystals) == 1

    for xname in crystals:
        crystal = crystals[xname]

    scaler = crystal._get_scaler()

    epochs = scaler._sweep_handler.get_epochs()

    from xia2.command_line.rogues_gallery import munch_rogues
    from pprint import pprint

    batched_reflections = {}

    for epoch in epochs:
        si = scaler._sweep_handler.get_sweep_information(epoch)
        intgr = si.get_integrater()
        experiments = ExperimentListFactory.from_json_file(
            intgr.get_integrated_experiments())
        reflections = pickle.load(open(intgr.get_integrated_reflections()))
        batched_reflections[si.get_batch_range()] = (experiments, reflections,
                                                     si.get_sweep_name())
        from dials.util import debug_console
        #   debug_console()

        good = reflections.get_flags(reflections.flags.integrated)
        #   bad  = reflections.get_flags(reflections.flags.bad_spot)
        reflections = reflections.select(good)

        for r in reflections:
            flags = r['flags']
            r['flags'] = []
            for v, f in reflections.flags.values.iteritems():
                if flags & f:
                    r['flags'].append(str(f))
            r['flags'] = ', '.join(r['flags'])
#     pprint(r)

        print "Collecting shoeboxes for %d reflections" % len(reflections)

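        # Allocate an empty shoebox per reflection (sized from its bounding box) and
        # fill it with pixel data read back from the imageset.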
        reflections["shoebox"] = flex.shoebox(reflections["panel"],
                                              reflections["bbox"],
                                              allocate=True)
        reflections.extract_shoeboxes(experiments.imagesets()[0], verbose=True)
        print "Consolidating..."

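        # Group shoeboxes by size, accumulating background-subtracted counts and mask
        # weights so that same-sized boxes can be averaged into a single profile.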
        sizes = {}
        for r in reflections:
            s = r['shoebox']
            a = s.size()
            if a not in sizes:
                sizes[a] = {
                    'sum': flex.float([0] * a[0] * a[1] * a[2]),
                    'weights': flex.int([0] * a[0] * a[1] * a[2]),
                    'count': 0
                }
            s.mask.set_selected(s.data < 0, 0)
            s.data.set_selected(s.mask == 0, 0)
            s.background.set_selected(s.mask == 0, 0)
            sizes[a]['sum'] += s.data - s.background
            sizes[a]['weights'] += s.mask
            sizes[a]['count'] += 1

        print len(sizes), "shoebox sizes extracted"

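        # Crude ASCII rendering of each accumulated shoebox, slice by slice: 'X' marks
        # negative pixels, '.' small values, digits the order of magnitude of the count.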
        for s, c in sizes.iteritems():
            print "%dx %s" % (c['count'], str(s))
            sdat = iter(c['sum'])
            wdat = iter(c['weights'])
            for z in range(s[0]):
                for y in range(s[1]):
                    count = [next(sdat) for x in range(s[2])]
                    weight = [next(wdat) for x in range(s[2])]
                    truecount = (0 if w == 0 else 10 * c / w
                                 for c, w in zip(count, weight))
                    visualize = ("X" if c < 0 else
                                 ("." if c < 10 else str(int(math.log10(c))))
                                 for c in truecount)
                    print "".join(visualize)
                print ""
            print ""
Ejemplo n.º 28
0
def reconstruct_rogues(params):
  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')

  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  import cPickle as pickle
  import dials # because WARNING:root:No profile class gaussian_rs registered
  from xia2.command_line.rogues_gallery import munch_rogues  # parses the ROGUES file below
  crystals = xinfo.get_crystals()
  assert len(crystals) == 1

  for xname in crystals:
    crystal = crystals[xname]

  scaler = crystal._get_scaler()

  epochs = scaler._sweep_handler.get_epochs()

  rogues = os.path.join(scaler.get_working_directory(),
                        xname, 'scale', 'ROGUES')

  rogue_reflections = munch_rogues(rogues)

  batched_reflections = { }

  for epoch in epochs:
    si = scaler._sweep_handler.get_sweep_information(epoch)
    intgr = si.get_integrater()
    experiments = ExperimentListFactory.from_json_file(
      intgr.get_integrated_experiments())
    reflections = pickle.load(open(intgr.get_integrated_reflections()))
    batched_reflections[si.get_batch_range()] = (experiments, reflections,
                                                 si.get_sweep_name())

  # - look up reflection in reflection list, get bounding box
  # - pull pixels given from image set, flatten these, write out

  from dials.array_family import flex
  from annlib_ext import AnnAdaptor as ann_adaptor

  reflections_run = { }
  for run in batched_reflections:
    reflections_run[run] = []

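  # As used here each rogue record is (batch, x_px, y_px, phi, ...): the batch number
  # assigns it to a run; x, y and phi locate it for the nearest-neighbour match below.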
  for rogue in rogue_reflections:
    b = rogue[0]
    for run in batched_reflections:
      if b >= run[0] and b <= run[1]:
        reflections_run[run].append(rogue)
        break

  for run_no, run in enumerate(reflections_run):
    experiment = batched_reflections[run][0]
    reflections = batched_reflections[run][1]
    name = batched_reflections[run][2]
    rogues = reflections_run[run]
    reference = flex.double()
    scan = experiment.scans()[0]
    images = experiment.imagesets()[0]
    for xyz in reflections['xyzcal.px']:
      reference.append(xyz[0])
      reference.append(xyz[1])
      reference.append(xyz[2])

    search = flex.double()
    for rogue in rogues:
      search.append(rogue[1])
      search.append(rogue[2])
      search.append(scan.get_array_index_from_angle(rogue[3]))

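    # Nearest-neighbour match each rogue position against the predicted reflection
    # centroids (xyzcal.px), keeping only the matched reflections.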
    ann = ann_adaptor(data=reference, dim=3, k=1)
    ann.query(search)

    keep = flex.bool(len(reflections), False)

    for j, rogue in enumerate(rogues):
      keep[ann.nn[j]] = True

    reflections = reflections.select(keep)

    if params.extract:
      reflections["shoebox"] = flex.shoebox(
        reflections["panel"],
        reflections["bbox"],
        allocate=True)
      reflections.extract_shoeboxes(images, verbose=False)

    if len(reflections_run) > 1:
      output = params.output.reflections.replace(
          '.pickle', '-%s.pickle' % name)
      print 'Extracted %d rogue reflections for %s to %s' % \
        (len(reflections), name, output)
      reflections.as_pickle(output)
    else:
      output = params.output.reflections
      print 'Extracted %d rogue reflections to %s' % \
        (len(reflections), output)
      reflections.as_pickle(output)
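
reconstruct_rogues expects a params object carrying an extract flag and an output.reflections filename. A minimal sketch of building such an object with libtbx.phil follows; the scope layout and default filename are illustrative assumptions, not the actual xia2 definition:

from libtbx.phil import parse

# Illustrative scope only; the real defaults live in xia2's command-line module.
phil_scope = parse(
    """
extract = False
  .type = bool
output {
  reflections = 'rogues.pickle'
    .type = str
}
"""
)
params = phil_scope.extract()
# params.extract and params.output.reflections match the attributes used above.
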
Ejemplo n.º 29
0
def xia2_main(stop_after=None):
    '''Actually process something...'''
    Citations.cite('xia2')

    # print versions of related software
    Chatter.write(dials_version())

    ccp4_version = get_ccp4_version()
    if ccp4_version is not None:
        Chatter.write('CCP4 %s' % ccp4_version)

    start_time = time.time()

    CommandLine = get_command_line()
    start_dir = Flags.get_starting_directory()

    # check that something useful has been assigned for processing...
    xtals = CommandLine.get_xinfo().get_crystals()

    no_images = True

    for name in xtals.keys():
        xtal = xtals[name]

        if not xtal.get_all_image_names():

            Chatter.write('-----------------------------------' + \
                          '-' * len(name))
            Chatter.write('| No images assigned for crystal %s |' % name)
            Chatter.write('-----------------------------------' + '-' \
                          * len(name))
        else:
            no_images = False

    args = []

    from xia2.Handlers.Phil import PhilIndex
    params = PhilIndex.get_python_object()
    mp_params = params.xia2.settings.multiprocessing
    njob = mp_params.njob

    from libtbx import group_args

    xinfo = CommandLine.get_xinfo()

    if os.path.exists('xia2.json'):
        from xia2.Schema.XProject import XProject
        xinfo_new = xinfo
        xinfo = XProject.from_json(filename='xia2.json')

        crystals = xinfo.get_crystals()
        crystals_new = xinfo_new.get_crystals()
        for crystal_id in crystals_new.keys():
            if crystal_id not in crystals:
                crystals[crystal_id] = crystals_new[crystal_id]
                continue
            crystals[crystal_id]._scaler = None  # reset scaler
            for wavelength_id in crystals_new[crystal_id].get_wavelength_names(
            ):
                wavelength_new = crystals_new[crystal_id].get_xwavelength(
                    wavelength_id)
                if wavelength_id not in crystals[
                        crystal_id].get_wavelength_names():
                    crystals[crystal_id].add_wavelength(wavelength_new)
                    continue
                wavelength = crystals[crystal_id].get_xwavelength(
                    wavelength_id)
                sweeps_new = wavelength_new.get_sweeps()
                sweeps = wavelength.get_sweeps()
                sweep_names = [s.get_name() for s in sweeps]
                sweep_keys = [(s.get_directory(), s.get_template(),
                               s.get_image_range()) for s in sweeps]
                for sweep in sweeps_new:
                    if ((sweep.get_directory(), sweep.get_template(),
                         sweep.get_image_range()) not in sweep_keys):
                        if sweep.get_name() in sweep_names:
                            i = 1
                            while 'SWEEP%i' % i in sweep_names:
                                i += 1
                            sweep._name = 'SWEEP%i' % i
                            break
                        wavelength.add_sweep(
                            name=sweep.get_name(),
                            sample=sweep.get_xsample(),
                            directory=sweep.get_directory(),
                            image=sweep.get_image(),
                            beam=sweep.get_beam_centre(),
                            reversephi=sweep.get_reversephi(),
                            distance=sweep.get_distance(),
                            gain=sweep.get_gain(),
                            dmin=sweep.get_resolution_high(),
                            dmax=sweep.get_resolution_low(),
                            polarization=sweep.get_polarization(),
                            frames_to_process=sweep.get_frames_to_process(),
                            user_lattice=sweep.get_user_lattice(),
                            user_cell=sweep.get_user_cell(),
                            epoch=sweep._epoch,
                            ice=sweep._ice,
                            excluded_regions=sweep._excluded_regions,
                        )
                        sweep_names.append(sweep.get_name())

    crystals = xinfo.get_crystals()

    failover = params.xia2.settings.failover

    if mp_params.mode == 'parallel' and njob > 1:
        driver_type = mp_params.type
        command_line_args = CommandLine.get_argv()[1:]
        for crystal_id in crystals.keys():
            for wavelength_id in crystals[crystal_id].get_wavelength_names():
                wavelength = crystals[crystal_id].get_xwavelength(
                    wavelength_id)
                sweeps = wavelength.get_sweeps()
                for sweep in sweeps:
                    sweep._get_indexer()
                    sweep._get_refiner()
                    sweep._get_integrater()
                    args.append((group_args(
                        driver_type=driver_type,
                        stop_after=stop_after,
                        failover=failover,
                        command_line_args=command_line_args,
                        nproc=mp_params.nproc,
                        crystal_id=crystal_id,
                        wavelength_id=wavelength_id,
                        sweep_id=sweep.get_name(),
                    ), ))

        from xia2.Driver.DriverFactory import DriverFactory
        default_driver_type = DriverFactory.get_driver_type()

        # run every nth job on the current computer (no need to submit to qsub)
        for i_job, arg in enumerate(args):
            if (i_job % njob) == 0:
                arg[0].driver_type = default_driver_type

        if mp_params.type == "qsub":
            method = "sge"
        else:
            method = "multiprocessing"
        nproc = mp_params.nproc
        qsub_command = mp_params.qsub_command
        if not qsub_command:
            qsub_command = 'qsub'
        qsub_command = '%s -V -cwd -pe smp %d' % (qsub_command, nproc)

        from libtbx import easy_mp
        results = easy_mp.parallel_map(
            process_one_sweep,
            args,
            processes=njob,
            #method=method,
            method="multiprocessing",
            qsub_command=qsub_command,
            preserve_order=True,
            preserve_exception_message=True)

        # Hack to update sweep with the serialized indexers/refiners/integraters
        i_sweep = 0
        for crystal_id in crystals.keys():
            for wavelength_id in crystals[crystal_id].get_wavelength_names():
                wavelength = crystals[crystal_id].get_xwavelength(
                    wavelength_id)
                remove_sweeps = []
                sweeps = wavelength.get_sweeps()
                for sweep in sweeps:
                    success, output, xsweep_dict = results[i_sweep]
                    if output is not None:
                        Chatter.write(output)
                    if not success:
                        Chatter.write('Sweep failed: removing %s' %
                                      sweep.get_name())
                        remove_sweeps.append(sweep)
                    else:
                        assert xsweep_dict is not None
                        Chatter.write('Loading sweep: %s' % sweep.get_name())
                        from xia2.Schema.XSweep import XSweep
                        new_sweep = XSweep.from_dict(xsweep_dict)
                        sweep._indexer = new_sweep._indexer
                        sweep._refiner = new_sweep._refiner
                        sweep._integrater = new_sweep._integrater
                    i_sweep += 1
                for sweep in remove_sweeps:
                    wavelength.remove_sweep(sweep)
                    sample = sweep.get_xsample()
                    sample.remove_sweep(sweep)

    else:
        for crystal_id in crystals.keys():
            for wavelength_id in crystals[crystal_id].get_wavelength_names():
                wavelength = crystals[crystal_id].get_xwavelength(
                    wavelength_id)
                remove_sweeps = []
                sweeps = wavelength.get_sweeps()
                for sweep in sweeps:
                    from dials.command_line.show import show_datablocks
                    from dxtbx.datablock import DataBlock
                    Debug.write(sweep.get_name())
                    Debug.write(
                        show_datablocks([DataBlock([sweep.get_imageset()])]))
                    try:
                        if stop_after == 'index':
                            sweep.get_indexer_cell()
                        else:
                            sweep.get_integrater_intensities()
                        sweep.serialize()
                    except Exception as e:
                        if failover:
                            Chatter.write('Processing sweep %s failed: %s' % \
                                          (sweep.get_name(), str(e)))
                            remove_sweeps.append(sweep)
                        else:
                            raise
                for sweep in remove_sweeps:
                    wavelength.remove_sweep(sweep)
                    sample = sweep.get_xsample()
                    sample.remove_sweep(sweep)

    # save intermediate xia2.json file in case scaling step fails
    xinfo.as_json(filename='xia2.json')

    if stop_after not in ('index', 'integrate'):
        Chatter.write(xinfo.get_output(), strip=False)

    for crystal in crystals.values():
        crystal.serialize()

    # save final xia2.json file in case report generation fails
    xinfo.as_json(filename='xia2.json')

    duration = time.time() - start_time

    # write out the time taken in a human readable way
    Chatter.write('Processing took %s' % \
                  time.strftime("%Hh %Mm %Ss", time.gmtime(duration)))

    if stop_after not in ('index', 'integrate'):
        # and the summary file
        with open('xia2-summary.dat', 'w') as fh:
            for record in xinfo.summarise():
                fh.write('%s\n' % record)

        # this import appears to overwrite the initial command-line Phil
        # overrides, so defer it to this point; see https://github.com/xia2/xia2/issues/150
        from xia2.command_line.html import generate_xia2_html

        if params.xia2.settings.small_molecule:
            params.xia2.settings.report.xtriage_analysis = False
            params.xia2.settings.report.include_radiation_damage = False

        generate_xia2_html(xinfo,
                           filename='xia2.html',
                           params=params.xia2.settings.report)

    write_citations()

    # delete all of the temporary mtz files...
    cleanup()
    Environment.cleanup()
Ejemplo n.º 30
0
def run():
    assert os.path.exists('xia2.json')
    from xia2.Schema.XProject import XProject
    xinfo = XProject.from_json(filename='xia2.json')
    generate_xia2_html(xinfo)
Ejemplo n.º 31
0
def exercise_serialization(dials_regression, tmp_dir):
    xia2_demo_data = os.path.join(dials_regression, "xia2_demo_data")
    template = os.path.join(xia2_demo_data, "insulin_1_###.img")

    os.chdir(tmp_dir)

    from xia2.Modules.Indexer.DialsIndexer import DialsIndexer
    from xia2.Modules.Refiner.DialsRefiner import DialsRefiner
    from xia2.Modules.Integrater.DialsIntegrater import DialsIntegrater
    from xia2.Modules.Scaler.CCP4ScalerA import CCP4ScalerA

    from dxtbx.datablock import DataBlockTemplateImporter
    importer = DataBlockTemplateImporter([template])
    datablocks = importer.datablocks
    imageset = datablocks[0].extract_imagesets()[0]

    from xia2.Schema.XProject import XProject
    from xia2.Schema.XCrystal import XCrystal
    from xia2.Schema.XWavelength import XWavelength
    from xia2.Schema.XSweep import XSweep
    from xia2.Schema.XSample import XSample
    proj = XProject()
    proj._name = "PROJ1"
    cryst = XCrystal("CRYST1", proj)
    proj.add_crystal(cryst)
    wav = XWavelength("WAVE1", cryst, wavelength=0.98)
    samp = XSample("X1", cryst)
    cryst.add_wavelength(wav)
    cryst.set_ha_info({'atom': 'S'})
    directory, image = os.path.split(imageset.get_path(1))
    wav.add_sweep(name='SWEEP1', sample=samp, directory=directory, image=image)

    import json
    from dxtbx.serialize.load import _decode_dict

    sweep = wav.get_sweeps()[0]
    indexer = DialsIndexer()
    indexer.set_working_directory(tmp_dir)
    indexer.add_indexer_imageset(imageset)
    indexer.set_indexer_sweep(sweep)
    sweep._indexer = indexer

    refiner = DialsRefiner()
    refiner.set_working_directory(tmp_dir)
    refiner.add_refiner_indexer(sweep.get_epoch(1), indexer)
    sweep._refiner = refiner

    integrater = DialsIntegrater()
    integrater.set_working_directory(tmp_dir)
    integrater.setup_from_image(imageset.get_path(1))
    integrater.set_integrater_refiner(refiner)
    #integrater.set_integrater_indexer(indexer)
    integrater.set_integrater_sweep(sweep)
    integrater.set_integrater_epoch(sweep.get_epoch(1))
    integrater.set_integrater_sweep_name(sweep.get_name())
    integrater.set_integrater_project_info(cryst.get_name(), wav.get_name(),
                                           sweep.get_name())
    sweep._integrater = integrater

    scaler = CCP4ScalerA()
    scaler.add_scaler_integrater(integrater)
    scaler.set_scaler_xcrystal(cryst)
    scaler.set_scaler_project_info(cryst.get_name(), wav.get_name())
    scaler._scalr_xcrystal = cryst
    cryst._scaler = scaler

    s_dict = sweep.to_dict()
    s_str = json.dumps(s_dict, ensure_ascii=True)
    s_dict = json.loads(s_str, object_hook=_decode_dict)
    xsweep = XSweep.from_dict(s_dict)

    w_dict = wav.to_dict()
    w_str = json.dumps(w_dict, ensure_ascii=True)
    w_dict = json.loads(w_str, object_hook=_decode_dict)
    xwav = XWavelength.from_dict(w_dict)
    assert xwav.get_sweeps()[0].get_wavelength() is xwav

    c_dict = cryst.to_dict()
    c_str = json.dumps(c_dict, ensure_ascii=True)
    c_dict = json.loads(c_str, object_hook=_decode_dict)
    xcryst = XCrystal.from_dict(c_dict)
    assert xcryst.get_xwavelength(
        xcryst.get_wavelength_names()[0]).get_crystal() is xcryst

    p_dict = proj.to_dict()
    p_str = json.dumps(p_dict, ensure_ascii=True)
    p_dict = json.loads(p_str, object_hook=_decode_dict)
    xproj = XProject.from_dict(p_dict)
    assert xproj.get_crystals().values()[0].get_project() is xproj

    json_str = proj.as_json()
    xproj = XProject.from_json(string=json_str)
    assert xproj.get_crystals().values()[0].get_project() is xproj
    print(xproj.get_output())
    print("\n".join(xproj.summarise()))
    json_str = xproj.as_json()
    xproj = XProject.from_json(string=json_str)
    assert xproj.get_crystals().values()[0].get_project() is xproj
    xcryst = xproj.get_crystals().values()[0]
    intgr = xcryst._get_integraters()[0]
    assert intgr.get_integrater_finish_done()
    assert xcryst._get_scaler()._sweep_handler.get_sweep_information(
        intgr.get_integrater_epoch()).get_integrater() is intgr

    print(xproj.get_output())
    print("\n".join(xproj.summarise()))
Ejemplo n.º 32
0
def exercise_serialization():
  if not have_dials_regression:
    print "Skipping exercise_serialization(): dials_regression not configured"
    return

  from xia2.Handlers.CommandLine import CommandLine

  xia2_demo_data = os.path.join(dials_regression, "xia2_demo_data")
  template = os.path.join(xia2_demo_data, "insulin_1_###.img")

  cwd = os.path.abspath(os.curdir)
  tmp_dir = os.path.abspath(open_tmp_directory())
  os.chdir(tmp_dir)

  from xia2.Modules.Indexer.DialsIndexer import DialsIndexer
  from xia2.Modules.Refiner.DialsRefiner import DialsRefiner
  from xia2.Modules.Integrater.DialsIntegrater import DialsIntegrater
  from xia2.Modules.Scaler.CCP4ScalerA import CCP4ScalerA

  from dxtbx.datablock import DataBlockTemplateImporter
  importer = DataBlockTemplateImporter([template])
  datablocks = importer.datablocks
  imageset = datablocks[0].extract_imagesets()[0]

  from xia2.Schema.XProject import XProject
  from xia2.Schema.XCrystal import XCrystal
  from xia2.Schema.XWavelength import XWavelength
  from xia2.Schema.XSweep import XSweep
  from xia2.Schema.XSample import XSample
  proj = XProject()
  proj._name = "PROJ1"
  cryst = XCrystal("CRYST1", proj)
  proj.add_crystal(cryst)
  wav = XWavelength("WAVE1", cryst, wavelength=0.98)
  samp = XSample("X1", cryst)
  cryst.add_wavelength(wav)
  cryst.set_ha_info({'atom': 'S'})
  directory, image = os.path.split(imageset.get_path(1))
  wav.add_sweep(name='SWEEP1', sample=samp, directory=directory, image=image)

  import json
  from dxtbx.serialize.load import _decode_dict

  sweep = wav.get_sweeps()[0]
  indexer = DialsIndexer()
  indexer.set_working_directory(tmp_dir)
  indexer.add_indexer_imageset(imageset)
  indexer.set_indexer_sweep(sweep)
  sweep._indexer = indexer

  refiner = DialsRefiner()
  refiner.set_working_directory(tmp_dir)
  refiner.add_refiner_indexer(sweep.get_epoch(1), indexer)
  sweep._refiner = refiner

  integrater = DialsIntegrater()
  integrater.set_working_directory(tmp_dir)
  integrater.setup_from_image(imageset.get_path(1))
  integrater.set_integrater_refiner(refiner)
  #integrater.set_integrater_indexer(indexer)
  integrater.set_integrater_sweep(sweep)
  integrater.set_integrater_epoch(sweep.get_epoch(1))
  integrater.set_integrater_sweep_name(sweep.get_name())
  integrater.set_integrater_project_info(
    cryst.get_name(), wav.get_name(), sweep.get_name())
  sweep._integrater = integrater

  scaler = CCP4ScalerA()
  scaler.add_scaler_integrater(integrater)
  scaler.set_scaler_xcrystal(cryst)
  scaler.set_scaler_project_info(cryst.get_name(), wav.get_name())
  scaler._scalr_xcrystal = cryst
  cryst._scaler = scaler

  s_dict = sweep.to_dict()
  s_str = json.dumps(s_dict, ensure_ascii=True)
  s_dict = json.loads(s_str, object_hook=_decode_dict)
  xsweep = XSweep.from_dict(s_dict)

  w_dict = wav.to_dict()
  w_str = json.dumps(w_dict, ensure_ascii=True)
  w_dict = json.loads(w_str, object_hook=_decode_dict)
  xwav = XWavelength.from_dict(w_dict)
  assert xwav.get_sweeps()[0].get_wavelength() is xwav

  c_dict = cryst.to_dict()
  c_str = json.dumps(c_dict, ensure_ascii=True)
  c_dict = json.loads(c_str, object_hook=_decode_dict)
  xcryst = XCrystal.from_dict(c_dict)
  assert xcryst.get_xwavelength(xcryst.get_wavelength_names()[0]).get_crystal() is xcryst

  p_dict = proj.to_dict()
  p_str = json.dumps(p_dict, ensure_ascii=True)
  p_dict = json.loads(p_str, object_hook=_decode_dict)
  xproj = XProject.from_dict(p_dict)
  assert xproj.get_crystals().values()[0].get_project() is xproj

  json_str = proj.as_json()
  xproj = XProject.from_json(string=json_str)
  assert xproj.get_crystals().values()[0].get_project() is xproj
  print xproj.get_output()
  print "\n".join(xproj.summarise())
  json_str = xproj.as_json()
  xproj = XProject.from_json(string=json_str)
  assert xproj.get_crystals().values()[0].get_project() is xproj
  xcryst = xproj.get_crystals().values()[0]
  intgr = xcryst._get_integraters()[0]
  assert intgr.get_integrater_finish_done()
  assert xcryst._get_scaler()._sweep_handler.get_sweep_information(
    intgr.get_integrater_epoch()).get_integrater() is intgr

  print xproj.get_output()
  print "\n".join(xproj.summarise())
  print