Example #1
def run():
    if os.path.exists("xia2-working.phil"):
        sys.argv.append("xia2-working.phil")
    try:
        check_environment()
    except Exception as e:
        traceback.print_exc(file=open("xia2.error", "w"))
        Chatter.write('Status: error "%s"' % str(e))

    # print the version
    Chatter.write(Version)
    Citations.cite("xia2")

    start_time = time.time()

    assert os.path.exists("xia2.json")
    from xia2.Schema.XProject import XProject

    xinfo = XProject.from_json(filename="xia2.json")

    crystals = xinfo.get_crystals()
    for crystal_id, crystal in crystals.items():
        # cwd = os.path.abspath(os.curdir)
        from libtbx import Auto

        scale_dir = PhilIndex.params.xia2.settings.scale.directory
        if scale_dir is Auto:
            scale_dir = "scale"
            i = 0
            while os.path.exists(os.path.join(crystal.get_name(), scale_dir)):
                i += 1
                scale_dir = "scale%i" % i
            PhilIndex.params.xia2.settings.scale.directory = scale_dir
        working_directory = Environment.generate_directory(
            [crystal.get_name(), scale_dir])
        # os.chdir(working_directory)

        crystals[crystal_id]._scaler = None  # reset scaler

        scaler = crystal._get_scaler()
        Chatter.write(xinfo.get_output())
        crystal.serialize()

    duration = time.time() - start_time

    # write out the time taken in a human readable way
    Chatter.write("Processing took %s" %
                  time.strftime("%Hh %Mm %Ss", time.gmtime(duration)))

    # delete all of the temporary mtz files...
    cleanup()

    write_citations()

    xinfo.as_json(filename="xia2.json")

    Environment.cleanup()
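
A note on the Auto directory logic above: when the scale directory is left to Auto, the loop probes "scale", "scale1", "scale2", ... until it finds a name that does not yet exist. A minimal standalone sketch of the same pattern using only os (the helper name pick_unused_dir is illustrative, not part of xia2):

import os

def pick_unused_dir(parent, base='scale'):
    # Probe base, base1, base2, ... until a name is found that does not
    # yet exist under parent, mirroring the Auto branch above.
    candidate = base
    i = 0
    while os.path.exists(os.path.join(parent, candidate)):
        i += 1
        candidate = '%s%i' % (base, i)
    return candidate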
Example #2
    def cleanup(self):
        out = open('xia2-files.txt', 'w')
        for f in self._temporary_files:
            try:
                os.remove(f)
                out.write('Deleted: %s\n' % f)
            except Exception as e:
                out.write('Failed to delete: %s (%s)\n' % \
                          (f, str(e)))

        for f in self._output_files:
            out.write('Output file (%s): %s\n' % f)

        # copy the log files
        log_directory = Environment.generate_directory('LogFiles')

        for f in self._log_file_keys:
            filename = os.path.join(log_directory,
                                    '%s.log' % f.replace(' ', '_'))
            shutil.copyfile(self._log_files[f], filename)
            out.write('Copied log file %s to %s\n' %
                      (self._log_files[f], filename))

        for f in self._xml_file_keys:
            filename = os.path.join(log_directory,
                                    '%s.xml' % f.replace(' ', '_'))
            shutil.copyfile(self._xml_files[f], filename)
            out.write('Copied xml file %s to %s\n' %
                      (self._xml_files[f], filename))

        for f in self._html_file_keys:
            filename = os.path.join(log_directory,
                                    '%s.html' % f.replace(' ', '_'))
            shutil.copyfile(self._html_files[f], filename)
            out.write('Copied html file %s to %s\n' %
                      (self._html_files[f], filename))

        # copy the data files
        data_directory = Environment.generate_directory('DataFiles')
        for f in self._data_files:
            filename = os.path.join(data_directory, os.path.split(f)[-1])
            shutil.copyfile(f, filename)
            out.write('Copied data file %s to %s\n' % \
                      (f, filename))

        for tag, ext in self._more_data_file_keys:
            filename_out = os.path.join(data_directory,
                                        '%s.%s' % (tag.replace(' ', '_'), ext))
            filename_in = self._more_data_files[(tag, ext)]
            shutil.copyfile(filename_in, filename_out)
            out.write('Copied extra data file %s to %s\n' %
                      (filename_in, filename_out))

        out.close()
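
The three copy loops above share one shape: a tag that may contain spaces becomes a filename, and the file is copied into a generated directory. A hedged sketch of that shared step with plain os/shutil (copy_tagged_files and its arguments are hypothetical names, not xia2 API):

import os
import shutil

def copy_tagged_files(tagged_files, destination, extension):
    # tagged_files maps a human-readable tag (which may contain spaces)
    # to a source path; each tag becomes '<tag>.<extension>' with spaces
    # replaced by underscores, as in the log/xml/html loops above.
    os.makedirs(destination, exist_ok=True)
    for tag, source in tagged_files.items():
        target = os.path.join(
            destination, '%s.%s' % (tag.replace(' ', '_'), extension))
        shutil.copyfile(source, target)
    return destination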
Example #3
    def get_data_file(self, filename):
        '''Return the point where this data file will end up!'''

        if filename not in self._data_files:
            return filename

        data_directory = Environment.generate_directory('DataFiles')
        return os.path.join(data_directory, os.path.split(filename)[-1])
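
Usage is straightforward: registered files are mapped to their eventual home under DataFiles, anything else passes through unchanged. A hypothetical call (object name and paths illustrative):

# final = handler.get_data_file('/scratch/run1/scaled.mtz')
# -> '<project>/DataFiles/scaled.mtz' if the file was registered with the
#    handler, otherwise the input path is returned untouched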
Example #4
    def _get_refiner(self):

        if self._refiner is None:
            # set the working directory for this, based on the hierarchy
            # defined herein...

            # that would be CRYSTAL_ID/WAVELENGTH/SWEEP/index &c.

            if not self.get_wavelength():
                wavelength_id = "default"
                crystal_id = "default"

            else:
                wavelength_id = self.get_wavelength().get_name()
                crystal_id = self.get_wavelength().get_crystal().get_name()

            working_directory = Environment.generate_directory(
                [crystal_id, wavelength_id,
                 self.get_name(), "refine"])

            # FIXME the indexer factory should probably be able to
            # take self [this object] as input, to help with deciding
            # the most appropriate indexer to use... this will certainly
            # be the case for the integrater. Maintaining this link
            # will also help the system cope with updates (which
            # was going to be one of the big problems...)
            # 06/SEP/06 no keep these interfaces separate - want to
            # keep "pure" interfaces to the programs for reuse, then
            # wrap in XStyle.
            self._refiner = RefinerFactory.RefinerForXSweep(self)

            ## set the user supplied lattice if there is one
            # if self._user_lattice:
            # self._indexer.set_indexer_input_lattice(self._user_lattice)
            # self._indexer.set_indexer_user_input_lattice(True)

            ## and also the cell constants - but only if lattice is
            ## assigned

            # if self._user_cell:
            # self._indexer.set_indexer_input_cell(self._user_cell)

            # else:
            # if self._user_cell:
            # raise RuntimeError('cannot assign cell without lattice')

            self._refiner.set_working_directory(working_directory)

            # if self._frames_to_process:
            # frames = self._frames_to_process
            # self._refiner.set_frame_wedge(frames[0], frames[1])

            # self._refiner.set_indexer_sweep_name(self._name)

        self._refiner.add_refiner_indexer(
            self.get_epoch(self._frames_to_process[0]), self._get_indexer())

        return self._refiner
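
Both _get_refiner() here and _get_indexer() below follow the same lazy-construction idiom: build the object from a factory on first access, cache it on self, and return the cached instance thereafter. A minimal generic sketch of that idiom (class and function names are illustrative, not xia2 API):

class LazyComponent:
    # Build an expensive object on first access and cache it, as
    # _get_refiner()/_get_indexer() do with their factories.
    def __init__(self, factory):
        self._factory = factory
        self._instance = None

    def get(self):
        if self._instance is None:
            self._instance = self._factory()
        return self._instance

# usage sketch:
# refiner = LazyComponent(lambda: RefinerFactory.RefinerForXSweep(sweep))
# refiner.get()  # constructed on first call
# refiner.get()  # cached instance returned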
Example #5
    def _get_indexer(self):
        """Get my indexer, if set, else create a new one from the
        factory."""

        if self._indexer is None:
            # set the working directory for this, based on the hierarchy
            # defined herein...

            # that would be CRYSTAL_ID/WAVELENGTH/SWEEP/index &c.
            if not self.get_wavelength():
                wavelength_id = "default"
                crystal_id = "default"
                project_id = "default"

            else:
                wavelength_id = self.get_wavelength().get_name()
                crystal_id = self.get_wavelength().get_crystal().get_name()
                project_id = (
                    self.get_wavelength().get_crystal().get_project().get_name())

            working_directory = Environment.generate_directory(
                [crystal_id, wavelength_id,
                 self.get_name(), "index"])

            # FIXME the indexer factory should probably be able to
            # take self [this object] as input, to help with deciding
            # the most appropriate indexer to use... this will certainly
            # be the case for the integrater. Maintaining this link
            # will also help the system cope with updates (which
            # was going to be one of the big problems...)
            # 06/SEP/06 no keep these interfaces separate - want to
            # keep "pure" interfaces to the programs for reuse, then
            # wrap in XStyle.
            self._indexer = IndexerFactory.IndexerForXSweep(self)

            # set the user supplied lattice if there is one
            if self._user_lattice:
                self._indexer.set_indexer_input_lattice(self._user_lattice)
                self._indexer.set_indexer_user_input_lattice(True)

                # and also the cell constants - but only if lattice is
                # assigned

                if self._user_cell:
                    self._indexer.set_indexer_input_cell(self._user_cell)

            else:
                if self._user_cell:
                    raise RuntimeError("cannot assign cell without lattice")

            self._indexer.set_working_directory(working_directory)

            self._indexer.set_indexer_project_info(project_id, crystal_id,
                                                   wavelength_id)

            self._indexer.set_indexer_sweep_name(self._name)

        return self._indexer
Example #6
    def _get_scaler(self):
        if self._scaler is None:

            # in here check if
            #
            # (1) self._scaled_merged_reflections is set and
            # (2) there is no sweep information
            #
            # if both of these are true then produce a null scaler
            # which will wrap this information

            from libtbx import Auto

            scale_dir = PhilIndex.params.xia2.settings.scale.directory
            if scale_dir is Auto:
                scale_dir = "scale"
            working_directory = Environment.generate_directory(
                [self._name, scale_dir])

            self._scaler = Scaler()

            # put an inverse link in place... to support RD analysis
            # involved change to Scaler interface definition

            self._scaler.set_scaler_xcrystal(self)

            if self._anomalous:
                self._scaler.set_scaler_anomalous(True)

            # set up a sensible working directory
            self._scaler.set_working_directory(working_directory)

            # set the reference reflection file, if we have one...
            if self._reference_reflection_file:
                self._scaler.set_scaler_reference_reflection_file(
                    self._reference_reflection_file)

            # and FreeR file
            if self._freer_file:
                self._scaler.set_scaler_freer_file(self._freer_file)

            # and spacegroup information
            if self._user_spacegroup:
                # compute the lattice and pointgroup from this...

                pointgroup = Syminfo.get_pointgroup(self._user_spacegroup)

                self._scaler.set_scaler_input_spacegroup(self._user_spacegroup)
                self._scaler.set_scaler_input_pointgroup(pointgroup)

            integraters = self._get_integraters()

            # then feed them to the scaler

            for i in integraters:
                self._scaler.add_scaler_integrater(i)

        return self._scaler
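
The block above also shows a configure-if-present idiom: optional inputs (reference reflections, FreeR file, user spacegroup) are pushed into the freshly built scaler only when they were actually supplied. A compact sketch with a stand-in object rather than the real Scaler (all names hypothetical):

class StubScaler:
    # Stand-in for the xia2 Scaler, recording whatever is set on it.
    def __init__(self):
        self.settings = {}

    def set(self, key, value):
        self.settings[key] = value

def configure(scaler, reference=None, freer=None, spacegroup=None):
    # Only forward the optional inputs that were actually provided.
    for key, value in (('reference', reference), ('freer', freer),
                       ('spacegroup', spacegroup)):
        if value is not None:
            scaler.set(key, value)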
Example #7
File: XSweep.py Project: xia2/xia2
  def _get_refiner(self):

    if self._refiner is None:
      # set the working directory for this, based on the hierarchy
      # defined herein...

      # that would be CRYSTAL_ID/WAVELENGTH/SWEEP/index &c.

      if not self.get_wavelength():
        wavelength_id = "default"
        crystal_id = "default"

      else:
        wavelength_id = self.get_wavelength().get_name()
        crystal_id = self.get_wavelength().get_crystal().get_name()

      working_directory = Environment.generate_directory(
        [crystal_id, wavelength_id, self.get_name(), 'refine'])

      # FIXME the indexer factory should probably be able to
      # take self [this object] as input, to help with deciding
      # the most appropriate indexer to use... this will certainly
      # be the case for the integrater. Maintaining this link
      # will also help the system cope with updates (which
      # was going to be one of the big problems...)
      # 06/SEP/06 no keep these interfaces separate - want to
      # keep "pure" interfaces to the programs for reuse, then
      # wrap in XStyle.
      self._refiner = RefinerFactory.RefinerForXSweep(self)

      ## set the user supplied lattice if there is one
      #if self._user_lattice:
        #self._indexer.set_indexer_input_lattice(self._user_lattice)
        #self._indexer.set_indexer_user_input_lattice(True)

        ## and also the cell constants - but only if lattice is
        ## assigned

        #if self._user_cell:
          #self._indexer.set_indexer_input_cell(self._user_cell)

      #else:
        #if self._user_cell:
          #raise RuntimeError('cannot assign cell without lattice')

      self._refiner.set_working_directory(working_directory)

      #if self._frames_to_process:
        #frames = self._frames_to_process
        #self._refiner.set_frame_wedge(frames[0], frames[1])

      #self._refiner.set_indexer_sweep_name(self._name)

    self._refiner.add_refiner_indexer(
      self.get_epoch(self._frames_to_process[0]), self._get_indexer())

    return self._refiner
Example #8
  def _get_scaler(self):
    if self._scaler is None:

      # in here check if
      #
      # (1) self._scaled_merged_reflections is set and
      # (2) there is no sweep information
      #
      # if both of these are true then produce a null scaler
      # which will wrap this information

      from libtbx import Auto
      scale_dir = PhilIndex.params.xia2.settings.scale.directory
      if scale_dir is Auto:
        scale_dir = 'scale'
      working_directory = Environment.generate_directory([self._name, scale_dir])

      self._scaler = Scaler()

      # put an inverse link in place... to support RD analysis
      # involved change to Scaler interface definition

      self._scaler.set_scaler_xcrystal(self)

      if self._anomalous:
        self._scaler.set_scaler_anomalous(True)

      # set up a sensible working directory
      self._scaler.set_working_directory(working_directory)

      # set the reference reflection file, if we have one...
      if self._reference_reflection_file:
        self._scaler.set_scaler_reference_reflection_file(
            self._reference_reflection_file)

      # and FreeR file
      if self._freer_file:
        self._scaler.set_scaler_freer_file(self._freer_file)

      # and spacegroup information
      if self._user_spacegroup:
        # compute the lattice and pointgroup from this...

        pointgroup = Syminfo.get_pointgroup(self._user_spacegroup)

        self._scaler.set_scaler_input_spacegroup(
            self._user_spacegroup)
        self._scaler.set_scaler_input_pointgroup(pointgroup)

      integraters = self._get_integraters()

      # then feed them to the scaler

      for i in integraters:
        self._scaler.add_scaler_integrater(i)

    return self._scaler
Example #9
    def write_cif(self):
        '''Write CIF to file.'''
        # update audit information for citations
        self.collate_audit_information()

        from xia2.Handlers.Environment import Environment
        data_directory = Environment.generate_directory('DataFiles')
        with open(os.path.join(data_directory, self._outfile), 'w') as fh:
            self._cif.show(out=fh)
Example #10
File: XSweep.py Project: xia2/xia2
  def _get_indexer(self):
    '''Get my indexer, if set, else create a new one from the
    factory.'''

    if self._indexer is None:
      # set the working directory for this, based on the hierarchy
      # defined herein...

      # that would be CRYSTAL_ID/WAVELENGTH/SWEEP/index &c.
      if not self.get_wavelength():
        wavelength_id = "default"
        crystal_id = "default"
        project_id = "default"

      else:
        wavelength_id = self.get_wavelength().get_name()
        crystal_id = self.get_wavelength().get_crystal().get_name()
        project_id = self.get_wavelength().get_crystal().get_project().get_name()

      working_directory = Environment.generate_directory(
        [crystal_id, wavelength_id, self.get_name(), 'index'])

      # FIXME the indexer factory should probably be able to
      # take self [this object] as input, to help with deciding
      # the most appropriate indexer to use... this will certainly
      # be the case for the integrater. Maintaining this link
      # will also help the system cope with updates (which
      # was going to be one of the big problems...)
      # 06/SEP/06 no keep these interfaces separate - want to
      # keep "pure" interfaces to the programs for reuse, then
      # wrap in XStyle.
      self._indexer = IndexerFactory.IndexerForXSweep(self)

      # set the user supplied lattice if there is one
      if self._user_lattice:
        self._indexer.set_indexer_input_lattice(self._user_lattice)
        self._indexer.set_indexer_user_input_lattice(True)

        # and also the cell constants - but only if lattice is
        # assigned

        if self._user_cell:
          self._indexer.set_indexer_input_cell(self._user_cell)

      else:
        if self._user_cell:
          raise RuntimeError('cannot assign cell without lattice')

      self._indexer.set_working_directory(working_directory)

      self._indexer.set_indexer_project_info(
        project_id, crystal_id, wavelength_id)

      self._indexer.set_indexer_sweep_name(self._name)

    return self._indexer
Example #11
def check_environment():
    '''Check the environment we are running in...'''

    if sys.hexversion < 0x02070000:
        raise RuntimeError('Python versions older than 2.7 are not supported')

    import cctbx
    executable = sys.executable
    cctbx_dir = os.sep.join(cctbx.__file__.split(os.sep)[:-3])

    # to help wrapper code - print process id...

    Debug.write('Process ID: %d' % os.getpid())

    Chatter.write('Environment configuration...')
    Chatter.write('Python => %s' % executable)
    Chatter.write('CCTBX => %s' % cctbx_dir)

    ccp4_keys = ['CCP4', 'CLIBD', 'CCP4_SCR']
    for k in ccp4_keys:
        v = Environment.getenv(k)
        if not v:
            raise RuntimeError('%s not defined - is CCP4 set up?' % k)
        if v != v.strip():
            raise RuntimeError('spaces around "%s"' % v)
        Chatter.write('%s => %s' % (k, v))

    from xia2.Handlers.Flags import Flags
    Chatter.write('Starting directory: %s' % Flags.get_starting_directory())
    Chatter.write('Working directory: %s' % os.getcwd())

    # temporary workaround to bug in pointless...
    if ' ' in os.getcwd():
        raise RuntimeError('Space in working directory ' \
            '(https://github.com/xia2/xia2/issues/114)')
    Chatter.write('Free space:        %.2f GB' % (df() / math.pow(2, 30)))

    try:
        if os.name == 'nt':
            hostname = os.environ['COMPUTERNAME'].split('.')[0]
        else:
            hostname = os.environ['HOSTNAME'].split('.')[0]

        Chatter.write('Host: %s' % hostname)
    except KeyError:
        pass

    Chatter.write('Contact: [email protected]')

    Chatter.write(Version)
Example #12
def check_environment():
    """Check the environment we are running in..."""

    if sys.hexversion < 0x02070000:
        raise RuntimeError("Python versions older than 2.7 are not supported")

    import cctbx

    executable = sys.executable
    cctbx_dir = os.sep.join(cctbx.__file__.split(os.sep)[:-3])

    # to help wrapper code - print process id...

    Debug.write("Process ID: %d" % os.getpid())

    Chatter.write("Environment configuration...")
    Chatter.write("Python => %s" % executable)
    Chatter.write("CCTBX => %s" % cctbx_dir)

    ccp4_keys = ["CCP4", "CLIBD", "CCP4_SCR"]
    for k in ccp4_keys:
        v = Environment.getenv(k)
        if not v:
            raise RuntimeError("%s not defined - is CCP4 set up?" % k)
        if v != v.strip():
            raise RuntimeError('spaces around "%s"' % v)
        Chatter.write("%s => %s" % (k, v))

    from xia2.Handlers.Flags import Flags

    Chatter.write("Starting directory: %s" % Flags.get_starting_directory())
    Chatter.write("Working directory: %s" % os.getcwd())
    Chatter.write("Free space:        %.2f GB" % (df() / math.pow(2, 30)))

    hostname = platform.node().split(".")[0]
    Chatter.write("Host: %s" % hostname)

    Chatter.write("Contact: [email protected]")

    Chatter.write(Version)

    # temporary workaround to bug in pointless...
    if " " in os.getcwd():
        raise RuntimeError("Space in working directory "
                           "(https://github.com/xia2/xia2/issues/114)")
Example #13
  def _generate_absorption_map(self, scaler):
    output = scaler.get_all_output()

    aimless = 'AIMLESS, CCP4'
    import re
    pattern = re.compile(" +#+ *CCP4.*#+")
    for line in output:
      if pattern.search(line):
        aimless = re.sub(r'\s\s+', ', ', line.strip("\t\n #"))
        break

    from xia2.Toolkit.AimlessSurface import evaluate_1degree, \
      scrape_coefficients, generate_map
    coefficients = scrape_coefficients(log=output)
    if coefficients:
      absmap = evaluate_1degree(coefficients)
      absmin, absmax = absmap.min(), absmap.max()
    else:
      absmin, absmax = 1.0, 1.0

    block = CIF.get_block('xia2')
    mmblock = mmCIF.get_block('xia2')
    block["_exptl_absorpt_correction_T_min"] = mmblock["_exptl.absorpt_correction_T_min"] = \
      absmin / absmax # = scaled
    block["_exptl_absorpt_correction_T_max"] = mmblock["_exptl.absorpt_correction_T_max"] = \
      absmax / absmax # = 1
    block["_exptl_absorpt_correction_type"] = mmblock["_exptl.absorpt_correction_type"] = \
      "empirical"
    block["_exptl_absorpt_process_details"] = mmblock["_exptl.absorpt_process_details"] = '''
%s
Scaling & analysis of unmerged intensities, absorption correction using spherical harmonics
''' % aimless

    if absmax - absmin > 0.000001:
      from xia2.Handlers.Environment import Environment
      log_directory = Environment.generate_directory('LogFiles')
      mapfile = os.path.join(log_directory, 'absorption_surface.png')
      generate_map(absmap, mapfile)
    else:
      Debug.write("Cannot create absorption surface: map is too flat (min: %f, max: %f)" % (absmin, absmax))
Example #15
def multi_crystal_analysis(stop_after=None):
  '''Actually process something...'''

  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')

  crystals = xinfo.get_crystals()
  for crystal_id, crystal in crystals.items():
    cwd = os.path.abspath(os.curdir)
    working_directory = Environment.generate_directory(
      [crystal.get_name(), 'analysis'])
    os.chdir(working_directory)

    from xia2.Wrappers.CCP4.Blend import Blend

    from xia2.lib.bits import auto_logfiler
    hand_blender = Blend()
    hand_blender.set_working_directory(working_directory)
    auto_logfiler(hand_blender)
    Citations.cite('blend')

    scaler = crystal._get_scaler()

    #epoch_to_si = {}
    epoch_to_batches = {}
    epoch_to_integrated_intensities = {}
    epoch_to_sweep_name = {}

    try:
      epochs = scaler._sweep_information.keys()
      for epoch in epochs:
        si = scaler._sweep_information[epoch]
        epoch_to_batches[epoch] = si['batches']
        epoch_to_integrated_intensities[epoch] = si['corrected_intensities']
        epoch_to_sweep_name[epoch] = si['sname']
    except AttributeError:
      epochs = scaler._sweep_handler.get_epochs()
      for epoch in epochs:
        si = scaler._sweep_handler.get_sweep_information(epoch)
        epoch_to_batches[epoch] = si.get_batches()
        epoch_to_integrated_intensities[epoch] = si.get_reflections()
        epoch_to_sweep_name[epoch] = si.get_sweep_name()

    unmerged_mtz = list(scaler.get_scaled_reflections('mtz_unmerged').values())[0]
    from iotbx.reflection_file_reader import any_reflection_file
    reader = any_reflection_file(unmerged_mtz)

    intensities = None
    batches = None
    assert reader.file_type() == 'ccp4_mtz'
    arrays = reader.as_miller_arrays(merge_equivalents=False)
    for ma in arrays:
      if ma.info().labels == ['BATCH']:
        batches = ma
      elif ma.info().labels == ['I', 'SIGI']:
        intensities = ma
      elif ma.info().labels == ['I(+)', 'SIGI(+)', 'I(-)', 'SIGI(-)']:
        intensities = ma

    from xia2.Handlers.Environment import which
    Rscript_binary = which('Rscript', debug=False)
    if Rscript_binary is None:
      Chatter.write('Skipping BLEND analysis: Rscript not available')
    else:
      for epoch in epochs:
        hand_blender.add_hklin(epoch_to_integrated_intensities[epoch],
                               label=epoch_to_sweep_name[epoch])
      hand_blender.analysis()
      Chatter.write("Dendrogram saved to: %s" %hand_blender.get_dendrogram_file())
      analysis = hand_blender.get_analysis()
      summary = hand_blender.get_summary()
      clusters = hand_blender.get_clusters()

      linkage_matrix = hand_blender.get_linkage_matrix()
      ddict = hand_blender.plot_dendrogram()

      rows = []
      headers = ['Cluster', 'Datasets', 'Multiplicity', 'Completeness', 'LCV', 'aLCV']
      completeness = flex.double()
      for i, cluster in clusters.items():
        sel_cluster = flex.bool(batches.size(), False)
        for j in cluster['dataset_ids']:
          batch_start, batch_end = epoch_to_batches[epochs[j-1]]
          sel_cluster |= (
            (batches.data() >= batch_start) & (batches.data() <= batch_end))
        intensities_cluster = intensities.select(sel_cluster)
        merging = intensities_cluster.merge_equivalents()
        merged_intensities = merging.array()
        multiplicities = merging.redundancies()
        completeness.append(merged_intensities.completeness())
        dataset_ids = cluster['dataset_ids']

        rows.append(
          ['%i' % i, ' '.join(['%i'] * len(dataset_ids)) % tuple(dataset_ids),
           '%.1f' % flex.mean(multiplicities.data().as_double()),
           '%.2f' % completeness[-1],
           '%.2f' % cluster['lcv'], '%.2f' % cluster['alcv']])

      # sort table by completeness
      perm = flex.sort_permutation(completeness)
      rows = [rows[i] for i in perm]

      print()
      print('Unit cell clustering summary:')
      print(tabulate(rows, headers, tablefmt='rst'))
      print()

      blend_html = tabulate(rows, headers, tablefmt='html').replace(
        '<table>', '<table class="table table-hover table-condensed">').replace(
        '<td>', '<td style="text-align: right;">')
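
The table is ordered by building a sort permutation over the completeness column and applying it to the rows, which keeps row contents and sort key in lockstep. The same idea without the cctbx flex types, in plain Python (values hypothetical):

# Hypothetical data standing in for the cluster table above.
completeness = [91.2, 99.7, 85.4]
rows = [['cluster-%d' % i, '%.2f' % c] for i, c in enumerate(completeness)]

# Sort permutation over one column, applied to the whole row list.
perm = sorted(range(len(completeness)), key=lambda i: completeness[i])
rows = [rows[i] for i in perm]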
Example #16
    def json_object(self, command_line=""):

        result = {}

        for crystal in sorted(self._crystals):
            xcrystal = self._crystals[crystal]

            cell = xcrystal.get_cell()
            spacegroup = xcrystal.get_likely_spacegroups()[0]

            result["AutoProc"] = {}
            tmp = result["AutoProc"]

            tmp["spaceGroup"] = spacegroup
            for name, value in zip(["a", "b", "c", "alpha", "beta", "gamma"],
                                   cell):
                tmp["refinedCell_%s" % name] = value

            result["AutoProcScalingContainer"] = {}
            tmp = result["AutoProcScalingContainer"]
            tmp["AutoProcScaling"] = {
                "recordTimeStamp":
                time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
            }

            statistics_all = xcrystal.get_statistics()
            reflection_files = xcrystal.get_scaled_merged_reflections()

            wavelength_names = xcrystal.get_wavelength_names()

            for key in statistics_all.keys():
                pname, xname, dname = key

                # FIXME should assert that the dname is a
                # valid wavelength name

                available = statistics_all[key].keys()

                stats = []
                keys = [
                    "High resolution limit",
                    "Low resolution limit",
                    "Completeness",
                    "Multiplicity",
                    "I/sigma",
                    "Rmerge(I+/-)",
                    "CC half",
                    "Anomalous completeness",
                    "Anomalous correlation",
                    "Anomalous multiplicity",
                    "Total observations",
                    "Total unique",
                    "Rmeas(I)",
                    "Rmeas(I+/-)",
                    "Rpim(I)",
                    "Rpim(I+/-)",
                    "Partial Bias",
                ]

                for k in keys:
                    if k in available:
                        stats.append(k)

                xwavelength = xcrystal.get_xwavelength(dname)
                sweeps = xwavelength.get_sweeps()

                tmp["AutoProcScalingStatistics"] = []
                tmp2 = tmp["AutoProcScalingStatistics"]

                for j, name in enumerate(
                    ["overall", "innerShell", "outerShell"]):
                    statistics_cache = {"scalingStatisticsType": name}

                    for s in stats:

                        if s in self._name_map:
                            n = self._name_map[s]
                        else:
                            continue

                        if isinstance(statistics_all[key][s], (list, tuple)):
                            statistics_cache[n] = statistics_all[key][s][j]

                    tmp2.append(statistics_cache)

                tmp["AutoProcIntegrationContainer"] = []
                tmp2 = tmp["AutoProcIntegrationContainer"]
                for sweep in sweeps:
                    if "#" in sweep.get_template():
                        image_name = sweep.get_image_name(0)
                    else:
                        image_name = os.path.join(sweep.get_directory(),
                                                  sweep.get_template())
                    cell = sweep.get_integrater_cell()
                    intgr_tmp = {}
                    for name, value in zip(
                        ["a", "b", "c", "alpha", "beta", "gamma"], cell):
                        intgr_tmp["cell_%s" % name] = value

                    # FIXME this is naughty
                    indxr = sweep._get_indexer()
                    intgr = sweep._get_integrater()

                    start, end = intgr.get_integrater_wedge()

                    intgr_tmp["startImageNumber"] = start
                    intgr_tmp["endImageNumber"] = end

                    intgr_tmp["refinedDetectorDistance"] = (
                        indxr.get_indexer_distance())

                    beam = indxr.get_indexer_beam_centre_raw_image()

                    intgr_tmp["refinedXBeam"] = beam[0]
                    intgr_tmp["refinedYBeam"] = beam[1]

                    tmp2.append({
                        "Image": {
                            "fileName": os.path.split(image_name)[-1],
                            "fileLocation":
                            sanitize(os.path.split(image_name)[0]),
                        },
                        "AutoProcIntegration": intgr_tmp,
                    })

            # file unpacking nonsense
            result["AutoProcProgramContainer"] = {}
            tmp = result["AutoProcProgramContainer"]
            tmp2 = {}

            if not command_line:
                from xia2.Handlers.CommandLine import CommandLine

                command_line = CommandLine.get_command_line()

            tmp2["processingCommandLine"] = sanitize(command_line)
            tmp2["processingProgram"] = "xia2"

            tmp["AutoProcProgram"] = tmp2
            tmp["AutoProcProgramAttachment"] = []
            tmp2 = tmp["AutoProcProgramAttachment"]

            from xia2.Handlers.Environment import Environment

            data_directory = Environment.generate_directory("DataFiles")

            for k in reflection_files:
                reflection_file = reflection_files[k]

                if not isinstance(reflection_file, str):
                    continue

                reflection_file = FileHandler.get_data_file(reflection_file)
                basename = os.path.basename(reflection_file)

                if os.path.isfile(os.path.join(data_directory, basename)):
                    # Use file in DataFiles directory in preference (if it exists)
                    reflection_file = os.path.join(data_directory, basename)

                tmp2.append({
                    "fileType":
                    "Result",
                    "fileName":
                    os.path.split(reflection_file)[-1],
                    "filePath":
                    sanitize(os.path.split(reflection_file)[0]),
                })

            tmp2.append({
                "fileType": "Log",
                "fileName": "xia2.txt",
                "filePath": sanitize(os.getcwd()),
            })

        return result
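
The refinedCell_* packing near the top is a zip over the six standard unit-cell parameter names; the same zip reappears later for the per-sweep cell_* keys. Distilled into a dict comprehension (cell values hypothetical):

cell = (78.0, 78.0, 37.0, 90.0, 90.0, 90.0)  # a, b, c, alpha, beta, gamma
tmp = {'refinedCell_%s' % name: value
       for name, value in zip(['a', 'b', 'c', 'alpha', 'beta', 'gamma'], cell)}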
Example #17
  def json_object(self, command_line=''):

    result = {}

    for crystal in sorted(self._crystals):
      xcrystal = self._crystals[crystal]

      cell = xcrystal.get_cell()
      spacegroup = xcrystal.get_likely_spacegroups()[0]

      result['AutoProc'] = {}
      tmp = result['AutoProc']

      tmp['spaceGroup'] = spacegroup
      for name, value in zip(['a', 'b', 'c', 'alpha', 'beta', 'gamma'], cell):
        tmp['refinedCell_%s' % name] = value

      result['AutoProcScalingContainer'] = {}
      tmp = result['AutoProcScalingContainer']
      tmp['AutoProcScaling'] = {
          'recordTimeStamp': time.strftime('%Y-%m-%d %H:%M:%S',
                                           time.localtime())
      }

      statistics_all = xcrystal.get_statistics()
      reflection_files = xcrystal.get_scaled_merged_reflections()

      wavelength_names = xcrystal.get_wavelength_names()

      for key in statistics_all.keys():
        pname, xname, dname = key

        # FIXME should assert that the dname is a
        # valid wavelength name

        available = statistics_all[key].keys()

        stats = []
        keys = [
            'High resolution limit',
            'Low resolution limit',
            'Completeness',
            'Multiplicity',
            'I/sigma',
            'Rmerge(I+/-)',
            'CC half',
            'Anomalous completeness',
            'Anomalous correlation',
            'Anomalous multiplicity',
            'Total observations',
            'Total unique',
            'Rmeas(I)',
            'Rmeas(I+/-)',
            'Rpim(I)',
            'Rpim(I+/-)',
            'Partial Bias'
            ]

        for k in keys:
          if k in available:
            stats.append(k)

        xwavelength = xcrystal.get_xwavelength(dname)
        sweeps = xwavelength.get_sweeps()

        tmp['AutoProcScalingStatistics'] = []
        tmp2 = tmp['AutoProcScalingStatistics']

        for j, name in enumerate(
            ['overall', 'innerShell', 'outerShell']):
          statistics_cache = {'scalingStatisticsType':name}

          for s in stats:

            if s in self._name_map:
              n = self._name_map[s]
            else:
              continue

            if isinstance(statistics_all[key][s], (list, tuple)):
              statistics_cache[n] = statistics_all[key][s][j]

          tmp2.append(statistics_cache)

        tmp['AutoProcIntegrationContainer'] = []
        tmp2 = tmp['AutoProcIntegrationContainer']
        for sweep in sweeps:
          if '#' in sweep.get_template():
            image_name = sweep.get_image_name(0)
          else:
            image_name = os.path.join(sweep.get_directory(),
                                      sweep.get_template())
          cell = sweep.get_integrater_cell()
          intgr_tmp = {}
          for name, value in zip(['a', 'b', 'c', 'alpha', 'beta', 'gamma'],
                                 cell):
            intgr_tmp['cell_%s' % name] = value

          # FIXME this is naughty
          indxr = sweep._get_indexer()
          intgr = sweep._get_integrater()

          start, end = intgr.get_integrater_wedge()

          intgr_tmp['startImageNumber'] = start
          intgr_tmp['endImageNumber'] = end

          intgr_tmp['refinedDetectorDistance'] = indxr.get_indexer_distance()

          beam = indxr.get_indexer_beam_centre()

          intgr_tmp['refinedXBeam'] = beam[0]
          intgr_tmp['refinedYBeam'] = beam[1]

          tmp2.append(
            {'Image':{'fileName':os.path.split(image_name)[-1],
                      'fileLocation':sanitize(os.path.split(image_name)[0])},
             'AutoProcIntegration': intgr_tmp})

      # file unpacking nonsense
      result['AutoProcProgramContainer'] = {}
      tmp = result['AutoProcProgramContainer']
      tmp2 = {}

      if not command_line:
        from xia2.Handlers.CommandLine import CommandLine
        command_line = CommandLine.get_command_line()

      tmp2['processingCommandLine'] = sanitize(command_line)
      tmp2['processingProgram'] = 'xia2'

      tmp['AutoProcProgram'] = tmp2
      tmp['AutoProcProgramAttachment'] = []
      tmp2 = tmp['AutoProcProgramAttachment']

      from xia2.Handlers.Environment import Environment
      data_directory = Environment.generate_directory('DataFiles')

      for k in reflection_files:
        reflection_file = reflection_files[k]

        if not isinstance(reflection_file, str):
          continue

        reflection_file = FileHandler.get_data_file(reflection_file)
        basename = os.path.basename(reflection_file)

        if os.path.isfile(os.path.join(data_directory, basename)):
          # Use file in DataFiles directory in preference (if it exists)
          reflection_file = os.path.join(data_directory, basename)

        tmp2.append({
          'fileType': 'Result',
          'fileName': os.path.split(reflection_file)[-1],
          'filePath': sanitize(os.path.split(reflection_file)[0]),
        })

      tmp2.append({'fileType':'Log',
                   'fileName':'xia2.txt',
                   'filePath':sanitize(os.getcwd())})

    return result
Example #18
def xia2_main(stop_after=None):
    '''Actually process something...'''
    Citations.cite('xia2')

    # print versions of related software
    Chatter.write(dials_version())

    ccp4_version = get_ccp4_version()
    if ccp4_version is not None:
        Chatter.write('CCP4 %s' % ccp4_version)

    start_time = time.time()

    CommandLine = get_command_line()
    start_dir = Flags.get_starting_directory()

    # check that something useful has been assigned for processing...
    xtals = CommandLine.get_xinfo().get_crystals()

    no_images = True

    for name in xtals.keys():
        xtal = xtals[name]

        if not xtal.get_all_image_names():

            Chatter.write('-----------------------------------' +
                          '-' * len(name))
            Chatter.write('| No images assigned for crystal %s |' % name)
            Chatter.write('-----------------------------------' +
                          '-' * len(name))
        else:
            no_images = False

    args = []

    from xia2.Handlers.Phil import PhilIndex
    params = PhilIndex.get_python_object()
    mp_params = params.xia2.settings.multiprocessing
    njob = mp_params.njob

    from libtbx import group_args

    xinfo = CommandLine.get_xinfo()

    if os.path.exists('xia2.json'):
        from xia2.Schema.XProject import XProject
        xinfo_new = xinfo
        xinfo = XProject.from_json(filename='xia2.json')

        crystals = xinfo.get_crystals()
        crystals_new = xinfo_new.get_crystals()
        for crystal_id in crystals_new.keys():
            if crystal_id not in crystals:
                crystals[crystal_id] = crystals_new[crystal_id]
                continue
            crystals[crystal_id]._scaler = None  # reset scaler
            for wavelength_id in crystals_new[crystal_id].get_wavelength_names(
            ):
                wavelength_new = crystals_new[crystal_id].get_xwavelength(
                    wavelength_id)
                if wavelength_id not in crystals[
                        crystal_id].get_wavelength_names():
                    # wavelength_new is already the XWavelength object
                    crystals[crystal_id].add_wavelength(wavelength_new)
                    continue
                wavelength = crystals[crystal_id].get_xwavelength(
                    wavelength_id)
                sweeps_new = wavelength_new.get_sweeps()
                sweeps = wavelength.get_sweeps()
                sweep_names = [s.get_name() for s in sweeps]
                sweep_keys = [(s.get_directory(), s.get_template(),
                               s.get_image_range()) for s in sweeps]
                for sweep in sweeps_new:
                    if ((sweep.get_directory(), sweep.get_template(),
                         sweep.get_image_range()) not in sweep_keys):
                        if sweep.get_name() in sweep_names:
                            i = 1
                            while 'SWEEP%i' % i in sweep_names:
                                i += 1
                            sweep._name = 'SWEEP%i' % i
                            break
                        wavelength.add_sweep(
                            name=sweep.get_name(),
                            sample=sweep.get_xsample(),
                            directory=sweep.get_directory(),
                            image=sweep.get_image(),
                            beam=sweep.get_beam_centre(),
                            reversephi=sweep.get_reversephi(),
                            distance=sweep.get_distance(),
                            gain=sweep.get_gain(),
                            dmin=sweep.get_resolution_high(),
                            dmax=sweep.get_resolution_low(),
                            polarization=sweep.get_polarization(),
                            frames_to_process=sweep.get_frames_to_process(),
                            user_lattice=sweep.get_user_lattice(),
                            user_cell=sweep.get_user_cell(),
                            epoch=sweep._epoch,
                            ice=sweep._ice,
                            excluded_regions=sweep._excluded_regions,
                        )
                        sweep_names.append(sweep.get_name())

    crystals = xinfo.get_crystals()

    failover = params.xia2.settings.failover

    if mp_params.mode == 'parallel' and njob > 1:
        driver_type = mp_params.type
        command_line_args = CommandLine.get_argv()[1:]
        for crystal_id in crystals.keys():
            for wavelength_id in crystals[crystal_id].get_wavelength_names():
                wavelength = crystals[crystal_id].get_xwavelength(
                    wavelength_id)
                sweeps = wavelength.get_sweeps()
                for sweep in sweeps:
                    sweep._get_indexer()
                    sweep._get_refiner()
                    sweep._get_integrater()
                    args.append((group_args(
                        driver_type=driver_type,
                        stop_after=stop_after,
                        failover=failover,
                        command_line_args=command_line_args,
                        nproc=mp_params.nproc,
                        crystal_id=crystal_id,
                        wavelength_id=wavelength_id,
                        sweep_id=sweep.get_name(),
                    ), ))

        from xia2.Driver.DriverFactory import DriverFactory
        default_driver_type = DriverFactory.get_driver_type()

        # run every nth job on the current computer (no need to submit to qsub)
        for i_job, arg in enumerate(args):
            if (i_job % njob) == 0:
                arg[0].driver_type = default_driver_type

        if mp_params.type == "qsub":
            method = "sge"
        else:
            method = "multiprocessing"
        nproc = mp_params.nproc
        qsub_command = mp_params.qsub_command
        if not qsub_command:
            qsub_command = 'qsub'
        qsub_command = '%s -V -cwd -pe smp %d' % (qsub_command, nproc)

        from libtbx import easy_mp
        results = easy_mp.parallel_map(
            process_one_sweep,
            args,
            processes=njob,
            #method=method,
            method="multiprocessing",
            qsub_command=qsub_command,
            preserve_order=True,
            preserve_exception_message=True)

        # Hack to update sweep with the serialized indexers/refiners/integraters
        i_sweep = 0
        for crystal_id in crystals.keys():
            for wavelength_id in crystals[crystal_id].get_wavelength_names():
                wavelength = crystals[crystal_id].get_xwavelength(
                    wavelength_id)
                remove_sweeps = []
                sweeps = wavelength.get_sweeps()
                for sweep in sweeps:
                    success, output, xsweep_dict = results[i_sweep]
                    if output is not None:
                        Chatter.write(output)
                    if not success:
                        Chatter.write('Sweep failed: removing %s' %
                                      sweep.get_name())
                        remove_sweeps.append(sweep)
                    else:
                        assert xsweep_dict is not None
                        Chatter.write('Loading sweep: %s' % sweep.get_name())
                        from xia2.Schema.XSweep import XSweep
                        new_sweep = XSweep.from_dict(xsweep_dict)
                        sweep._indexer = new_sweep._indexer
                        sweep._refiner = new_sweep._refiner
                        sweep._integrater = new_sweep._integrater
                    i_sweep += 1
                for sweep in remove_sweeps:
                    wavelength.remove_sweep(sweep)
                    sample = sweep.get_xsample()
                    sample.remove_sweep(sweep)

    else:
        for crystal_id in crystals.keys():
            for wavelength_id in crystals[crystal_id].get_wavelength_names():
                wavelength = crystals[crystal_id].get_xwavelength(
                    wavelength_id)
                remove_sweeps = []
                sweeps = wavelength.get_sweeps()
                for sweep in sweeps:
                    from dials.command_line.show import show_datablocks
                    from dxtbx.datablock import DataBlock
                    Debug.write(sweep.get_name())
                    Debug.write(
                        show_datablocks([DataBlock([sweep.get_imageset()])]))
                    try:
                        if stop_after == 'index':
                            sweep.get_indexer_cell()
                        else:
                            sweep.get_integrater_intensities()
                        sweep.serialize()
                    except Exception as e:
                        if failover:
                            Chatter.write('Processing sweep %s failed: %s' % \
                                          (sweep.get_name(), str(e)))
                            remove_sweeps.append(sweep)
                        else:
                            raise
                for sweep in remove_sweeps:
                    wavelength.remove_sweep(sweep)
                    sample = sweep.get_xsample()
                    sample.remove_sweep(sweep)

    # save intermediate xia2.json file in case scaling step fails
    xinfo.as_json(filename='xia2.json')

    if stop_after not in ('index', 'integrate'):
        Chatter.write(xinfo.get_output(), strip=False)

    for crystal in crystals.values():
        crystal.serialize()

    # save final xia2.json file in case report generation fails
    xinfo.as_json(filename='xia2.json')

    duration = time.time() - start_time

    # write out the time taken in a human readable way
    Chatter.write('Processing took %s' % \
                  time.strftime("%Hh %Mm %Ss", time.gmtime(duration)))

    if stop_after not in ('index', 'integrate'):
        # and the summary file
        with open('xia2-summary.dat', 'w') as fh:
            for record in xinfo.summarise():
                fh.write('%s\n' % record)

        # looks like this import overwrites the initial command line
        # Phil overrides so... for https://github.com/xia2/xia2/issues/150
        from xia2.command_line.html import generate_xia2_html

        if params.xia2.settings.small_molecule:
            params.xia2.settings.report.xtriage_analysis = False
            params.xia2.settings.report.include_radiation_damage = False

        generate_xia2_html(xinfo,
                           filename='xia2.html',
                           params=params.xia2.settings.report)

    write_citations()

    # delete all of the temporary mtz files...
    cleanup()
    Environment.cleanup()
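
The parallel branch fans one job per sweep out over njob workers via easy_mp.parallel_map, preserving input order so the results can be zipped back onto the sweeps afterwards. A minimal standard-library sketch of that fan-out (process_one is a hypothetical stand-in for process_one_sweep):

from multiprocessing import Pool

def process_one(arg):
    # Stand-in for process_one_sweep: do the per-sweep work and return
    # a result the caller can match back up by position.
    return arg * arg

if __name__ == '__main__':
    args = list(range(8))
    with Pool(processes=4) as pool:
        results = pool.map(process_one, args)  # results align with args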
Example #19
def load_sweeps_with_common_indexing():
  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')

  import dials # required for gaussian_rs warning
  from xia2.Wrappers.Dials.Reindex import Reindex
  Citations.cite('dials')

  from dxtbx.model.experiment_list import ExperimentListFactory
  import pickle
  crystals = xinfo.get_crystals()
  assert len(crystals) == 1
  crystal = next(iter(crystals.values()))
  working_directory = Environment.generate_directory([crystal.get_name(), 'analysis'])
  os.chdir(working_directory)

  scaler = crystal._get_scaler()

  epoch_to_batches = {}
  epoch_to_integrated_intensities = {}
  epoch_to_sweep_name = {}

  # Aimless only
  epochs = scaler._sweep_handler.get_epochs()

  reference_cell = None
  reference_lattice = None
  reference_vectors = None
  reference_wavelength = None

  # Reindex each sweep to same setting
  all_miller_indices = flex.miller_index()
  all_two_thetas = flex.double()

  for epoch in epochs:
    si = scaler._sweep_handler.get_sweep_information(epoch)
    Chatter.smallbanner(si.get_sweep_name(), True)
    Debug.smallbanner(si.get_sweep_name(), True)

    intgr = si.get_integrater()
    experiments_filename = intgr.get_integrated_experiments()
    reflections_filename = intgr.get_integrated_reflections()
    refiner = intgr.get_integrater_refiner()
    Debug.write('experiment: %s' % experiments_filename)
    Debug.write('reflection: %s' % reflections_filename)

    # Use setting of first sweep as reference
    if reference_vectors is None:
      reference_vectors = experiments_filename

    # Assume that all sweeps have the same lattice system
    if reference_lattice is None:
      reference_lattice = refiner.get_refiner_lattice()
    else:
      assert reference_lattice == refiner.get_refiner_lattice()
    Debug.write("lattice: %s" % refiner.get_refiner_lattice())

    # Read .json file for sweep
    db = ExperimentListFactory.from_json_file(experiments_filename)

    # Assume that each file only contains a single experiment
    assert (len(db) == 1)
    db = db[0]

    # Get beam vector
    s0 = db.beam.get_unit_s0()

    # Use the unit cell of the first sweep as reference
    if reference_cell is None:
      reference_cell = db.crystal.get_unit_cell()
      Debug.write("Reference cell: %s" % str(reference_cell))

    dials_reindex = Reindex()
    dials_reindex.set_working_directory(working_directory)
    dials_reindex.set_cb_op("auto")
    dials_reindex.set_reference_filename(reference_vectors)
    dials_reindex.set_experiments_filename(experiments_filename)
    dials_reindex.set_indexed_filename(reflections_filename)
    auto_logfiler(dials_reindex)
    dials_reindex.run()

    # Assume that all data are collected at same wavelength
    if reference_wavelength is None:
      reference_wavelength = intgr.get_wavelength()
    else:
      assert abs(reference_wavelength - intgr.get_wavelength()) < 0.01
    Debug.write("wavelength: %f A" % intgr.get_wavelength())
    Debug.write("distance: %f mm" % intgr.get_distance())

    # Get integrated reflection data
    with open(dials_reindex.get_reindexed_reflections_filename(), 'rb') as fh:
      reflections = pickle.load(fh)

    selection = reflections.get_flags(reflections.flags.used_in_refinement)
    Chatter.write("Found %d reflections used in refinement (out of %d entries)" % (selection.count(True), len(reflections['miller_index'])))
    reflections = reflections.select(selection)

    # Filter bad reflections
    selection = reflections['intensity.sum.variance'] <= 0
    if selection.count(True) > 0:
      reflections.del_selected(selection)
      print('Removing %d reflections with negative variance' %
            selection.count(True))

    if 'intensity.prf.variance' in reflections:
      selection = reflections['intensity.prf.variance'] <= 0
      if selection.count(True) > 0:
        reflections.del_selected(selection)
        print('Removing %d profile reflections with negative variance' %
              selection.count(True))

    # Find the observed 2theta angles
    miller_indices = flex.miller_index()
    two_thetas_obs = flex.double()
    for pixel, panel, hkl in zip(reflections['xyzobs.px.value'], reflections['panel'], reflections['miller_index']):
      assert hkl != (0, 0, 0)
      two_thetas_obs.append(db.detector[panel].get_two_theta_at_pixel(s0, pixel[0:2]))
      miller_indices.append(hkl)

    # Convert observed 2theta angles to degrees
    two_thetas_obs = two_thetas_obs * 180 / 3.14159265359
    Chatter.write("Remaining %d reflections are in 2theta range %.3f - %.3f deg" % (len(miller_indices), min(two_thetas_obs), max(two_thetas_obs)))

    all_miller_indices.extend(miller_indices)
    all_two_thetas.extend(two_thetas_obs)

  return all_miller_indices, all_two_thetas, reference_cell, reference_lattice, reference_wavelength
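
The 2theta angles come back from the detector model in radians and are converted to degrees by scaling with 180/pi; the literal 3.14159265359 above plays the role of math.pi. The conversion in isolation (values hypothetical):

import math

two_thetas_rad = [0.12, 0.34, 0.56]  # per-reflection angles in radians
two_thetas_deg = [math.degrees(t) for t in two_thetas_rad]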
Example #20
  def write_xml(self, file, command_line=''):

    fout = open(file, 'w')

    fout.write('<?xml version="1.0"?>')
    fout.write('<AutoProcContainer>\n')

    for crystal in sorted(self._crystals):
      xcrystal = self._crystals[crystal]

      cell = xcrystal.get_cell()
      spacegroup = xcrystal.get_likely_spacegroups()[0]

      fout.write('<AutoProc><spaceGroup>%s</spaceGroup>' % spacegroup)
      self.write_refined_cell(fout, cell)
      fout.write('</AutoProc>')

      fout.write('<AutoProcScalingContainer>')
      fout.write('<AutoProcScaling>')
      self.write_date(fout)
      fout.write('</AutoProcScaling>')

      statistics_all = xcrystal.get_statistics()
      reflection_files = xcrystal.get_scaled_merged_reflections()

      wavelength_names = xcrystal.get_wavelength_names()

      for key in statistics_all.keys():
        pname, xname, dname = key

        # FIXME should assert that the dname is a
        # valid wavelength name

        available = statistics_all[key].keys()

        stats = []
        keys = [
            'High resolution limit',
            'Low resolution limit',
            'Completeness',
            'Multiplicity',
            'I/sigma',
            'Rmerge(I+/-)',
            'CC half',
            'Anomalous completeness',
            'Anomalous correlation',
            'Anomalous multiplicity',
            'Total observations',
            'Total unique',
            'Rmeas(I)',
            'Rmeas(I+/-)',
            'Rpim(I)',
            'Rpim(I+/-)',
            'Partial Bias'
            ]

        for k in keys:
          if k in available:
            stats.append(k)

        xwavelength = xcrystal.get_xwavelength(dname)
        sweeps = xwavelength.get_sweeps()

        for j, name in enumerate(['overall', 'innerShell', 'outerShell']):
          statistics_cache = {}

          for s in stats:
            if isinstance(statistics_all[key][s], (list, tuple)):
              statistics_cache[s] = statistics_all[key][s][j]

          # send these to be written out
          self.write_scaling_statistics(fout, name, statistics_cache)

        for sweep in sweeps:
          fout.write('<AutoProcIntegrationContainer>\n')
          if '#' in sweep.get_template():
            image_name = sweep.get_image_name(0)
          else:
            image_name = os.path.join(sweep.get_directory(),
                                      sweep.get_template())
          fout.write('<Image><fileName>%s</fileName>' % \
                     os.path.split(image_name)[-1])
          fout.write('<fileLocation>%s</fileLocation></Image>' %
                     sanitize(os.path.split(image_name)[0]))
          fout.write('<AutoProcIntegration>\n')
          cell = sweep.get_integrater_cell()
          self.write_cell(fout, cell)

          # FIXME this is naughty
          intgr = sweep._get_integrater()

          start, end = intgr.get_integrater_wedge()

          fout.write('<startImageNumber>%d</startImageNumber>' % \
                     start)

          fout.write('<endImageNumber>%d</endImageNumber>' % \
                     end)

          # FIXME this is naughty
          indxr = sweep._get_indexer()

          fout.write(
              '<refinedDetectorDistance>%f</refinedDetectorDistance>' % \
              indxr.get_indexer_distance())

          beam = indxr.get_indexer_beam_centre()

          fout.write('<refinedXBeam>%f</refinedXBeam>' % beam[0])
          fout.write('<refinedYBeam>%f</refinedYBeam>' % beam[1])

          fout.write('</AutoProcIntegration>\n')
          fout.write('</AutoProcIntegrationContainer>\n')

      fout.write('</AutoProcScalingContainer>')

      # file unpacking nonsense

      if not command_line:
        from xia2.Handlers.CommandLine import CommandLine
        command_line = CommandLine.get_command_line()

      fout.write('<AutoProcProgramContainer><AutoProcProgram>')
      fout.write('<processingCommandLine>%s</processingCommandLine>' \
                 % sanitize(command_line))
      fout.write('<processingPrograms>xia2</processingPrograms>')
      fout.write('</AutoProcProgram>')

      from xia2.Handlers.Environment import Environment
      data_directory = Environment.generate_directory('DataFiles')
      log_directory = Environment.generate_directory('LogFiles')

      for k in reflection_files:

        reflection_file = reflection_files[k]

        if not isinstance(reflection_file, str):
          continue

        reflection_file = FileHandler.get_data_file(reflection_file)

        basename = os.path.basename(reflection_file)
        if os.path.isfile(os.path.join(data_directory, basename)):
          # Use file in DataFiles directory in preference (if it exists)
          reflection_file = os.path.join(data_directory, basename)

        fout.write('<AutoProcProgramAttachment><fileType>Result')
        fout.write('</fileType><fileName>%s</fileName>' % \
                   os.path.split(reflection_file)[-1])
        fout.write('<filePath>%s</filePath>' % \
                   sanitize(os.path.split(reflection_file)[0]))
        fout.write('</AutoProcProgramAttachment>\n')

      import glob
      g = glob.glob(os.path.join(log_directory, '*merging-statistics.json'))
      for merging_stats_json in g:
        fout.write('<AutoProcProgramAttachment><fileType>Graph')
        fout.write('</fileType><fileName>%s</fileName>' %
                   os.path.split(merging_stats_json)[-1])
        fout.write('<filePath>%s</filePath>' % sanitize(log_directory))
        fout.write('</AutoProcProgramAttachment>\n')

      # add the xia2.txt file...

      fout.write('<AutoProcProgramAttachment><fileType>Log')
      fout.write('</fileType><fileName>xia2.txt</fileName>')
      fout.write('<filePath>%s</filePath>' % sanitize(os.getcwd()))
      fout.write('</AutoProcProgramAttachment>\n')

      fout.write('</AutoProcProgramContainer>')

    fout.write('</AutoProcContainer>\n')
    fout.close()
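
A hedged usage sketch for the write_xml method above; the handler object and file name are assumptions, not part of the snippet. The method walks every crystal it knows about and emits a single AutoProcContainer XML document:

# 'handler' stands for whatever object exposes write_xml (hypothetical)
handler.write_xml('autoproc.xml')
# optionally record the command line explicitly instead of letting the
# method recover it from the CommandLine handler:
handler.write_xml('autoproc.xml', command_line='xia2 /data/images')
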
Exemplo n.º 21
0
    def _compute_scaler_statistics(self,
                                   scaled_unmerged_mtz,
                                   selected_band=None,
                                   wave=None):
        ''' selected_band = (d_min, d_max) with None for automatic determination. '''
        # mapping of expected dictionary names to iotbx.merging_statistics attributes
        key_to_var = {
            'I/sigma': 'i_over_sigma_mean',
            'Completeness': 'completeness',
            'Low resolution limit': 'd_max',
            'Multiplicity': 'mean_redundancy',
            'Rmerge(I)': 'r_merge',
            #'Wilson B factor':,
            'Rmeas(I)': 'r_meas',
            'High resolution limit': 'd_min',
            'Total observations': 'n_obs',
            'Rpim(I)': 'r_pim',
            'CC half': 'cc_one_half',
            'Total unique': 'n_uniq',
        }

        anom_key_to_var = {
            'Rmerge(I+/-)': 'r_merge',
            'Rpim(I+/-)': 'r_pim',
            'Rmeas(I+/-)': 'r_meas',
            'Anomalous completeness': 'anom_completeness',
            'Anomalous correlation': 'anom_half_corr',
            'Anomalous multiplicity': 'mean_redundancy',
        }

        stats = {}
        select_result, select_anom_result = None, None

        # don't call self.get_scaler_likely_spacegroups() since that calls
        # self.scale(), which would introduce a subtle bug here
        from cctbx import sgtbx
        sg = sgtbx.space_group_info(str(
            self._scalr_likely_spacegroups[0])).group()
        from xia2.Handlers.Environment import Environment
        log_directory = Environment.generate_directory('LogFiles')
        merging_stats_file = os.path.join(
            log_directory, '%s_%s%s_merging-statistics.txt' %
            (self._scalr_pname, self._scalr_xname,
             '' if wave is None else '_%s' % wave))
        merging_stats_json = os.path.join(
            log_directory, '%s_%s%s_merging-statistics.json' %
            (self._scalr_pname, self._scalr_xname,
             '' if wave is None else '_%s' % wave))

        result, select_result, anom_result, select_anom_result = None, None, None, None
        n_bins = PhilIndex.params.xia2.settings.merging_statistics.n_bins
        import iotbx.merging_statistics
        while result is None:
            try:

                result = self._iotbx_merging_statistics(scaled_unmerged_mtz,
                                                        anomalous=False,
                                                        n_bins=n_bins)
                result.as_json(file_name=merging_stats_json)
                with open(merging_stats_file, 'w') as fh:
                    result.show(out=fh)

                four_column_output = selected_band and any(selected_band)
                if four_column_output:
                    select_result = self._iotbx_merging_statistics(
                        scaled_unmerged_mtz,
                        anomalous=False,
                        d_min=selected_band[0],
                        d_max=selected_band[1],
                        n_bins=n_bins)

                if sg.is_centric():
                    anom_result = None
                    anom_key_to_var = {}
                else:
                    anom_result = self._iotbx_merging_statistics(
                        scaled_unmerged_mtz, anomalous=True, n_bins=n_bins)
                    stats['Anomalous slope'] = [anom_result.anomalous_np_slope]
                    if four_column_output:
                        select_anom_result = self._iotbx_merging_statistics(
                            scaled_unmerged_mtz,
                            anomalous=True,
                            d_min=selected_band[0],
                            d_max=selected_band[1],
                            n_bins=n_bins)

            except iotbx.merging_statistics.StatisticsErrorNoReflectionsInRange:
                # Too few reflections for too many bins. Reduce number of bins and try again.
                result = None
                n_bins = n_bins - 3
                if n_bins > 5:
                    continue
                else:
                    raise

        from six.moves import cStringIO as StringIO
        result_cache = StringIO()
        result.show(out=result_cache)

        for d, r, s in ((key_to_var, result, select_result),
                        (anom_key_to_var, anom_result, select_anom_result)):
            for k, v in d.iteritems():
                if four_column_output:
                    values = (getattr(s.overall, v), getattr(s.bins[0], v),
                              getattr(s.bins[-1], v), getattr(r.overall, v))
                else:
                    values = (getattr(r.overall,
                                      v), getattr(r.bins[0],
                                                  v), getattr(r.bins[-1], v))
                if 'completeness' in v:
                    values = [v_ * 100 for v_ in values]
                if values[0] is not None:
                    stats[k] = values

        return stats
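
For context, a minimal standalone sketch of the iotbx.merging_statistics machinery wrapped by _iotbx_merging_statistics above; the MTZ file name is hypothetical, and the attribute names are the ones the key_to_var mapping relies on:

import iotbx.merging_statistics

i_obs = iotbx.merging_statistics.select_data('scaled_unmerged.mtz', data_labels=None)
stats = iotbx.merging_statistics.dataset_statistics(
    i_obs=i_obs, n_bins=20, anomalous=False)
print 'Overall I/sigma: %.2f  CC1/2: %.3f' % (
    stats.overall.i_over_sigma_mean, stats.overall.cc_one_half)
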
Exemplo n.º 22
0
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')

  crystals = xinfo.get_crystals()
  for crystal_id, crystal in crystals.iteritems():
    #cwd = os.path.abspath(os.curdir)
    from libtbx import Auto
    scale_dir = PhilIndex.params.xia2.settings.scale.directory
    if scale_dir is Auto:
      scale_dir = 'scale'
      i = 0
      while os.path.exists(os.path.join(crystal.get_name(), scale_dir)):
        i += 1
        scale_dir = 'scale%i' %i
      PhilIndex.params.xia2.settings.scale.directory = scale_dir
    working_directory = Environment.generate_directory(
      [crystal.get_name(), scale_dir])
    #os.chdir(working_directory)

    crystals[crystal_id]._scaler = None # reset scaler

    scaler = crystal._get_scaler()
    Chatter.write(xinfo.get_output())
    crystal.serialize()

  duration = time.time() - start_time

  # write out the time taken in a human readable way
  Chatter.write('Processing took %s' % \
                time.strftime("%Hh %Mm %Ss", time.gmtime(duration)))

  # delete all of the temporary mtz files...
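
One detail worth noting in the snippet above: time.gmtime interprets the elapsed seconds as an offset from the epoch, so the %H/%M/%S fields give hours, minutes and seconds for any run shorter than 24 hours. A quick check:

import time
print time.strftime('%Hh %Mm %Ss', time.gmtime(3725))  # prints: 01h 02m 05s
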
Exemplo n.º 23
0
def multi_crystal_analysis(stop_after=None):
    '''Run multi-crystal analysis on the scaled data described by xia2.json.'''

    assert os.path.exists('xia2.json')
    from xia2.Schema.XProject import XProject
    xinfo = XProject.from_json(filename='xia2.json')

    crystals = xinfo.get_crystals()
    for crystal_id, crystal in crystals.iteritems():
        cwd = os.path.abspath(os.curdir)
        working_directory = Environment.generate_directory(
            [crystal.get_name(), 'analysis'])
        os.chdir(working_directory)

        scaler = crystal._get_scaler()

        #epoch_to_si = {}
        epoch_to_batches = {}
        epoch_to_integrated_intensities = {}
        epoch_to_sweep_name = {}
        epoch_to_experiments_filename = {}
        epoch_to_experiments = {}
        sweep_name_to_epoch = {}
        epoch_to_first_image = {}

        from dxtbx.serialize import load
        try:
            epochs = scaler._sweep_information.keys()
            for epoch in epochs:
                si = scaler._sweep_information[epoch]
                epoch_to_batches[epoch] = si['batches']
                epoch_to_integrated_intensities[epoch] = si[
                    'corrected_intensities']
                epoch_to_sweep_name[epoch] = si['sname']
                sweep_name_to_epoch[si['sname']] = epoch
                intgr = si['integrater']
                epoch_to_experiments_filename[epoch] = \
                    intgr.get_integrated_experiments()
                epoch_to_experiments[epoch] = load.experiment_list(
                    intgr.get_integrated_experiments())

        except AttributeError:
            epochs = scaler._sweep_handler.get_epochs()
            for epoch in epochs:
                si = scaler._sweep_handler.get_sweep_information(epoch)
                epoch_to_batches[epoch] = si.get_batches()
                epoch_to_integrated_intensities[epoch] = si.get_reflections()
                epoch_to_sweep_name[epoch] = si.get_sweep_name()
                sweep_name_to_epoch[si.get_sweep_name()] = epoch
                intgr = si.get_integrater()
                epoch_to_experiments_filename[epoch] = \
                    intgr.get_integrated_experiments()
                epoch_to_experiments[epoch] = load.experiment_list(
                    intgr.get_integrated_experiments())

        from xia2.Wrappers.Dials.StereographicProjection import StereographicProjection
        sp_json_files = {}
        for hkl in ((1, 0, 0), (0, 1, 0), (0, 0, 1)):
            sp = StereographicProjection()
            auto_logfiler(sp)
            sp.set_working_directory(working_directory)
            for experiments in epoch_to_experiments_filename.values():
                sp.add_experiments(experiments)
            sp.set_hkl(hkl)
            sp.run()
            sp_json_files[hkl] = sp.get_json_filename()

        unmerged_mtz = scaler.get_scaled_reflections(
            'mtz_unmerged').values()[0]
        from iotbx.reflection_file_reader import any_reflection_file
        reader = any_reflection_file(unmerged_mtz)

        from xia2.Wrappers.XIA.PlotMultiplicity import PlotMultiplicity
        mult_json_files = {}
        for axis in ('h', 'k', 'l'):
            pm = PlotMultiplicity()
            auto_logfiler(pm)
            pm.set_working_directory(working_directory)
            pm.set_mtz_filename(unmerged_mtz)
            pm.set_slice_axis(axis)
            pm.set_show_missing(True)
            pm.run()
            mult_json_files[axis] = pm.get_json_filename()

        intensities = None
        batches = None
        assert reader.file_type() == 'ccp4_mtz'
        arrays = reader.as_miller_arrays(merge_equivalents=False)
        for ma in arrays:
            if ma.info().labels == ['BATCH']:
                batches = ma
            elif ma.info().labels == ['I', 'SIGI']:
                intensities = ma
            elif ma.info().labels == ['I(+)', 'SIGI(+)', 'I(-)', 'SIGI(-)']:
                intensities = ma

        from xia2.Wrappers.CCP4.Blend import Blend
        hand_blender = Blend()
        hand_blender.set_working_directory(working_directory)
        auto_logfiler(hand_blender)
        Citations.cite('blend')

        from xia2.Handlers.Environment import which
        Rscript_binary = which('Rscript', debug=False)
        # initialise BLEND outputs so the report code below still works if
        # the BLEND analysis has to be skipped
        ddict = None
        blend_html = ''
        if Rscript_binary is None:
            Chatter.write('Skipping BLEND analysis: Rscript not available')
        else:
            for epoch in epochs:
                hand_blender.add_hklin(epoch_to_integrated_intensities[epoch],
                                       label=epoch_to_sweep_name[epoch])
            hand_blender.analysis()
            Chatter.write("Dendrogram saved to: %s" %
                          hand_blender.get_dendrogram_file())
            analysis = hand_blender.get_analysis()
            summary = hand_blender.get_summary()
            clusters = hand_blender.get_clusters()

            ddict = hand_blender.plot_dendrogram()

            phil_files_dir = 'phil_files'
            if not os.path.exists(phil_files_dir):
                os.makedirs(phil_files_dir)

            rows = []
            headers = [
                'Cluster', 'Datasets', 'Multiplicity', 'Completeness', 'LCV',
                'aLCV', 'Average unit cell'
            ]
            completeness = flex.double()
            average_unit_cell_params = []
            for i, cluster in clusters.iteritems():
                print 'Cluster %s' % i
                sel_cluster = flex.bool(batches.size(), False)
                cluster_uc_params = [flex.double() for k in range(6)]
                for j in cluster['dataset_ids']:
                    epoch = epochs[j - 1]
                    batch_start, batch_end = epoch_to_batches[epoch]
                    sel_cluster |= ((batches.data() >= batch_start) &
                                    (batches.data() <= batch_end))
                    expts = epoch_to_experiments.get(epoch)
                    assert expts is not None, (epoch)
                    assert len(expts) == 1, len(expts)
                    expt = expts[0]
                    uc_params = expt.crystal.get_unit_cell().parameters()
                    for k in range(6):
                        cluster_uc_params[k].append(uc_params[k])
                intensities_cluster = intensities.select(sel_cluster)
                merging = intensities_cluster.merge_equivalents()
                merged_intensities = merging.array()
                multiplicities = merging.redundancies()
                completeness.append(merged_intensities.completeness())
                average_unit_cell_params.append(
                    tuple(flex.mean(p) for p in cluster_uc_params))
                dataset_ids = cluster['dataset_ids']

                assert min(dataset_ids) > 0
                with open(
                        os.path.join(phil_files_dir,
                                     'blend_cluster_%i_images.phil' % i),
                        'wb') as f:
                    sweep_names = [
                        hand_blender._labels[dataset_id - 1]
                        for dataset_id in dataset_ids
                    ]
                    for sweep_name in sweep_names:
                        expts = epoch_to_experiments.get(
                            sweep_name_to_epoch.get(sweep_name))
                        assert expts is not None, (
                            sweep_name, sweep_name_to_epoch.get(sweep_name))
                        assert len(expts) == 1, len(expts)
                        expt = expts[0]
                        print >> f, 'xia2.settings.input.image = %s' % \
                            expt.imageset.get_path(0)

                rows.append([
                    '%i' % i,
                    ' '.join(['%i'] * len(dataset_ids)) % tuple(dataset_ids),
                    '%.1f' % flex.mean(multiplicities.data().as_double()),
                    '%.2f' % completeness[-1],
                    '%.2f' % cluster['lcv'],
                    '%.2f' % cluster['alcv'],
                    '%g %g %g %g %g %g' % average_unit_cell_params[-1]
                ])

            # sort table by completeness
            perm = flex.sort_permutation(completeness)
            rows = [rows[i] for i in perm]

            print
            print 'Unit cell clustering summary:'
            print tabulate(rows, headers, tablefmt='rst')
            print

            blend_html = tabulate(rows, headers, tablefmt='html').replace(
                '<table>',
                '<table class="table table-hover table-condensed">').replace(
                    '<td>', '<td style="text-align: right;">')

    # XXX what about multiple wavelengths?
    with open('batches.phil', 'wb') as f:
        try:
            for epoch, si in scaler._sweep_information.iteritems():
                print >> f, "batch {"
                print >> f, "  id=%s" % si['sname']
                print >> f, "  range=%i,%i" % tuple(si['batches'])
                print >> f, "}"
        except AttributeError:
            for epoch in scaler._sweep_handler.get_epochs():
                si = scaler._sweep_handler.get_sweep_information(epoch)
                print >> f, "batch {"
                print >> f, "  id=%s" % si.get_sweep_name()
                print >> f, "  range=%i,%i" % tuple(si.get_batches())
                print >> f, "}"

    from xia2.Wrappers.XIA.MultiCrystalAnalysis import MultiCrystalAnalysis
    mca = MultiCrystalAnalysis()
    auto_logfiler(mca, extra="MultiCrystalAnalysis")
    mca.add_command_line_args([
        scaler.get_scaled_reflections(format="sca_unmerged").values()[0],
        "unit_cell=%s %s %s %s %s %s" % tuple(scaler.get_scaler_cell()),
        "batches.phil"
    ])
    mca.set_working_directory(working_directory)
    mca.run()

    intensity_clusters = mca.get_clusters()
    rows = []
    headers = [
        'Cluster', 'Datasets', 'Multiplicity', 'Completeness', 'Height',
        'Average unit cell'
    ]
    completeness = flex.double()
    average_unit_cell_params = []
    for i, cluster in intensity_clusters.iteritems():
        sel_cluster = flex.bool(batches.size(), False)
        cluster_uc_params = [flex.double() for k in range(6)]
        for j in cluster['datasets']:
            epoch = epochs[j - 1]
            batch_start, batch_end = epoch_to_batches[epoch]
            sel_cluster |= ((batches.data() >= batch_start) &
                            (batches.data() <= batch_end))
            expts = epoch_to_experiments.get(epoch)
            assert expts is not None, (epoch)
            assert len(expts) == 1, len(expts)
            expt = expts[0]
            uc_params = expt.crystal.get_unit_cell().parameters()
            for k in range(6):
                cluster_uc_params[k].append(uc_params[k])
        intensities_cluster = intensities.select(sel_cluster)
        merging = intensities_cluster.merge_equivalents()
        merged_intensities = merging.array()
        multiplicities = merging.redundancies()
        completeness.append(merged_intensities.completeness())
        average_unit_cell_params.append(
            tuple(flex.mean(p) for p in cluster_uc_params))
        dataset_ids = cluster['datasets']

        rows.append([
            '%i' % int(i),
            ' '.join(['%i'] * len(dataset_ids)) % tuple(dataset_ids),
            '%.1f' % flex.mean(multiplicities.data().as_double()),
            '%.2f' % completeness[-1],
            '%.2f' % cluster['height'],
            '%g %g %g %g %g %g' % average_unit_cell_params[-1]
        ])

    # sort table by completeness
    perm = flex.sort_permutation(completeness)
    rows = [rows[i] for i in perm]

    print 'Intensity clustering summary:'
    print tabulate(rows, headers, tablefmt='rst')
    print

    intensity_clustering_html = tabulate(
        rows, headers, tablefmt='html').replace(
            '<table>',
            '<table class="table table-hover table-condensed">').replace(
                '<td>', '<td style="text-align: right;">')

    import json

    json_data = {}
    if ddict is not None:
        from xia2.Modules.MultiCrystalAnalysis import scipy_dendrogram_to_plotly_json
        json_data['blend_dendrogram'] = scipy_dendrogram_to_plotly_json(ddict)
    else:
        json_data['blend_dendrogram'] = {'data': [], 'layout': {}}

    json_data['intensity_clustering'] = mca.get_dict()
    del json_data['intensity_clustering']['clusters']

    for hkl in ((1, 0, 0), (0, 1, 0), (0, 0, 1)):
        with open(sp_json_files[hkl], 'rb') as f:
            d = json.load(f)
            d['layout']['title'] = \
                'Stereographic projection (hkl=%i%i%i)' % hkl
            json_data['stereographic_projection_%s%s%s' % hkl] = d

    for axis in ('h', 'k', 'l'):
        with open(mult_json_files[axis], 'rb') as f:
            json_data['multiplicity_%s' % axis] = json.load(f)

    json_str = json.dumps(json_data, indent=2)

    javascript = ['var graphs = %s' % (json_str)]
    javascript.append(
        'Plotly.newPlot(blend_dendrogram, graphs.blend_dendrogram.data, graphs.blend_dendrogram.layout);'
    )
    javascript.append(
        'Plotly.newPlot(intensity_clustering, graphs.intensity_clustering.data, graphs.intensity_clustering.layout);'
    )
    for hkl in ((1, 0, 0), (0, 1, 0), (0, 0, 1)):
        javascript.append(
            'Plotly.newPlot(stereographic_projection_%(hkl)s, graphs.stereographic_projection_%(hkl)s.data, graphs.stereographic_projection_%(hkl)s.layout);'
            % ({
                'hkl': "%s%s%s" % hkl
            }))
    for axis in ('h', 'k', 'l'):
        javascript.append(
            'Plotly.newPlot(multiplicity_%(axis)s, graphs.multiplicity_%(axis)s.data, graphs.multiplicity_%(axis)s.layout);'
            % ({
                'axis': axis
            }))

    html_header = '''
<head>

<!-- Plotly.js -->
<script src="https://cdn.plot.ly/plotly-latest.min.js"></script>

<meta name="viewport" content="width=device-width, initial-scale=1" charset="UTF-8">
<link rel="stylesheet" href="http://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/css/bootstrap.min.css"/>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js"></script>
<script src="http://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/js/bootstrap.min.js"></script>
<style type="text/css">

body {
  /*font-family: Helmet, Freesans, Helvetica, Arial, sans-serif;*/
  margin: 8px;
  min-width: 240px;
  margin-left: 5%;
  margin-right: 5%;
}

.plot {
  float: left;
  width: 1200px;
  height: 800px;
  margin-bottom: 20px;
}

.square_plot {
  float: left;
  width: 800px;
  height: 800px;
  margin-bottom: 20px;
}

</style>

</head>

'''

    html_body = '''

<body>

<div class="page-header">
  <h1>Multi-crystal analysis report</h1>
</div>

<div class="panel-group">

  <div class="panel panel-default">
    <div class="panel-heading" data-toggle="collapse" href="#collapse_multiplicity">
      <h4 class="panel-title">
        <a>Multiplicity plots</a>
      </h4>
    </div>
    <div id="collapse_multiplicity" class="panel-collapse collapse">
      <div class="panel-body">
        <div class="col-xs-12 col-sm-12 col-md-12 square_plot" id="multiplicity_h"></div>
        <div class="col-xs-12 col-sm-12 col-md-12 square_plot" id="multiplicity_k"></div>
        <div class="col-xs-12 col-sm-12 col-md-12 square_plot" id="multiplicity_l"></div>
      </div>
    </div>
  </div>

  <div class="panel panel-default">
    <div class="panel-heading" data-toggle="collapse" href="#collapse_stereographic_projection">
      <h4 class="panel-title">
        <a>Stereographic projections</a>
      </h4>
    </div>
    <div id="collapse_stereographic_projection" class="panel-collapse collapse">
      <div class="panel-body">
        <div class="col-xs-12 col-sm-12 col-md-12 square_plot" id="stereographic_projection_100"></div>
        <div class="col-xs-12 col-sm-12 col-md-12 square_plot" id="stereographic_projection_010"></div>
        <div class="col-xs-12 col-sm-12 col-md-12 square_plot" id="stereographic_projection_001"></div>
      </div>
    </div>
  </div>

  <div class="panel panel-default">
    <div class="panel-heading" data-toggle="collapse" href="#collapse_cell">
      <h4 class="panel-title">
        <a>Unit cell clustering</a>
      </h4>
    </div>
    <div id="collapse_cell" class="panel-collapse collapse">
      <div class="panel-body">
        <div class="col-xs-12 col-sm-12 col-md-12 plot" id="blend_dendrogram"></div>
        <div class="table-responsive" style="width: 800px">
          %(blend_html)s
        </div>
      </div>
    </div>
  </div>

  <div class="panel panel-default">
    <div class="panel-heading" data-toggle="collapse" href="#collapse_intensity">
      <h4 class="panel-title">
        <a>Intensity clustering</a>
      </h4>
    </div>
    <div id="collapse_intensity" class="panel-collapse collapse">
      <div class="panel-body">
        <div class="col-xs-12 col-sm-12 col-md-12 plot" id="intensity_clustering" style="height:1000px"></div>
        <div class="table-responsive" style="width: 800px">
          %(intensity_clustering_html)s
        </div>
      </div>
    </div>
  </div>
</div>

<script>
%(script)s
</script>
</body>
    ''' % {
        'script': '\n'.join(javascript),
        'blend_html': blend_html,
        'intensity_clustering_html': intensity_clustering_html
    }

    html = '\n'.join([html_header, html_body])

    print "Writing html report to: %s" % 'multi-crystal-report.html'
    with open('multi-crystal-report.html', 'wb') as f:
        print >> f, html.encode('ascii', 'xmlcharrefreplace')

    write_citations()

    Environment.cleanup()

    return
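
The cluster summaries above are rendered twice with tabulate, once as reStructuredText for the terminal and once as HTML for the report. A minimal sketch, assuming the tabulate package provides the tabulate() function used above; the row values are made up:

from tabulate import tabulate

headers = ['Cluster', 'Datasets', 'Multiplicity', 'Completeness', 'Height',
           'Average unit cell']
rows = [['1', '1 2', '3.4', '0.98', '0.21', '78 78 37 90 90 90']]
print tabulate(rows, headers, tablefmt='rst')
print tabulate(rows, headers, tablefmt='html')
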
Exemplo n.º 24
0
    def write_xml(self, file, command_line="", working_phil=None):
        if working_phil is not None:
            PhilIndex.merge_phil(working_phil)
        params = PhilIndex.get_python_object()

        fout = open(file, "w")

        fout.write('<?xml version="1.0"?>')
        fout.write("<AutoProcContainer>\n")

        for crystal in sorted(self._crystals):
            xcrystal = self._crystals[crystal]

            cell = xcrystal.get_cell()
            spacegroup = xcrystal.get_likely_spacegroups()[0]

            fout.write("<AutoProc><spaceGroup>%s</spaceGroup>" % spacegroup)
            self.write_refined_cell(fout, cell)
            fout.write("</AutoProc>")

            fout.write("<AutoProcScalingContainer>")
            fout.write("<AutoProcScaling>")
            self.write_date(fout)
            fout.write("</AutoProcScaling>")

            statistics_all = xcrystal.get_statistics()
            reflection_files = xcrystal.get_scaled_merged_reflections()

            wavelength_names = xcrystal.get_wavelength_names()

            for key in statistics_all.keys():
                pname, xname, dname = key

                # FIXME should assert that the dname is a
                # valid wavelength name

                available = statistics_all[key].keys()

                stats = []
                keys = [
                    "High resolution limit",
                    "Low resolution limit",
                    "Completeness",
                    "Multiplicity",
                    "I/sigma",
                    "Rmerge(I+/-)",
                    "CC half",
                    "Anomalous completeness",
                    "Anomalous correlation",
                    "Anomalous multiplicity",
                    "Total observations",
                    "Total unique",
                    "Rmeas(I)",
                    "Rmeas(I+/-)",
                    "Rpim(I)",
                    "Rpim(I+/-)",
                    "Partial Bias",
                ]

                for k in keys:
                    if k in available:
                        stats.append(k)

                xwavelength = xcrystal.get_xwavelength(dname)
                sweeps = xwavelength.get_sweeps()

                for j, name in enumerate(
                    ["overall", "innerShell", "outerShell"]):
                    statistics_cache = {}

                    for s in stats:
                        if isinstance(statistics_all[key][s], (list, tuple)):
                            statistics_cache[s] = statistics_all[key][s][j]

                    # send these to be written out
                    self.write_scaling_statistics(fout, name, statistics_cache)

                for sweep in sweeps:
                    fout.write("<AutoProcIntegrationContainer>\n")
                    if "#" in sweep.get_template():
                        image_name = sweep.get_image_name(0)
                    else:
                        image_name = os.path.join(sweep.get_directory(),
                                                  sweep.get_template())
                    fout.write("<Image><fileName>%s</fileName>" %
                               os.path.split(image_name)[-1])
                    fout.write("<fileLocation>%s</fileLocation></Image>" %
                               sanitize(os.path.split(image_name)[0]))
                    fout.write("<AutoProcIntegration>\n")
                    cell = sweep.get_integrater_cell()
                    self.write_cell(fout, cell)

                    # FIXME this is naughty
                    intgr = sweep._get_integrater()

                    start, end = intgr.get_integrater_wedge()

                    fout.write("<startImageNumber>%d</startImageNumber>" %
                               start)

                    fout.write("<endImageNumber>%d</endImageNumber>" % end)

                    # FIXME this is naughty
                    indxr = sweep._get_indexer()

                    fout.write(
                        "<refinedDetectorDistance>%f</refinedDetectorDistance>"
                        % indxr.get_indexer_distance())

                    beam = indxr.get_indexer_beam_centre_raw_image()

                    fout.write("<refinedXBeam>%f</refinedXBeam>" % beam[0])
                    fout.write("<refinedYBeam>%f</refinedYBeam>" % beam[1])

                    fout.write("</AutoProcIntegration>\n")
                    fout.write("</AutoProcIntegrationContainer>\n")

            fout.write("</AutoProcScalingContainer>")

            # file unpacking nonsense

            if not command_line:
                from xia2.Handlers.CommandLine import CommandLine

                command_line = CommandLine.get_command_line()

            pipeline = params.xia2.settings.pipeline
            fout.write("<AutoProcProgramContainer><AutoProcProgram>")
            fout.write("<processingCommandLine>%s</processingCommandLine>" %
                       sanitize(command_line))
            fout.write("<processingPrograms>xia2 %s</processingPrograms>" %
                       pipeline)
            fout.write("</AutoProcProgram>")

            from xia2.Handlers.Environment import Environment

            data_directory = Environment.generate_directory("DataFiles")
            log_directory = Environment.generate_directory("LogFiles")

            for k in reflection_files:

                reflection_file = reflection_files[k]

                if not isinstance(reflection_file, str):
                    continue

                reflection_file = FileHandler.get_data_file(reflection_file)

                basename = os.path.basename(reflection_file)
                if os.path.isfile(os.path.join(data_directory, basename)):
                    # Use file in DataFiles directory in preference (if it exists)
                    reflection_file = os.path.join(data_directory, basename)

                fout.write("<AutoProcProgramAttachment><fileType>Result")
                fout.write("</fileType><fileName>%s</fileName>" %
                           os.path.split(reflection_file)[-1])
                fout.write("<filePath>%s</filePath>" %
                           sanitize(os.path.split(reflection_file)[0]))
                fout.write("</AutoProcProgramAttachment>\n")

            import glob

            g = glob.glob(
                os.path.join(log_directory, "*merging-statistics.json"))
            for merging_stats_json in g:
                fout.write("<AutoProcProgramAttachment><fileType>Graph")
                fout.write("</fileType><fileName>%s</fileName>" %
                           os.path.split(merging_stats_json)[-1])
                fout.write("<filePath>%s</filePath>" % sanitize(log_directory))
                fout.write("</AutoProcProgramAttachment>\n")

            # add the xia2.txt file...

            fout.write("<AutoProcProgramAttachment><fileType>Log")
            fout.write("</fileType><fileName>xia2.txt</fileName>")
            fout.write("<filePath>%s</filePath>" % sanitize(os.getcwd()))
            fout.write("</AutoProcProgramAttachment>\n")

            fout.write("</AutoProcProgramContainer>")

        fout.write("</AutoProcContainer>\n")
        fout.close()
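
The sanitize() helper used throughout write_xml is not defined in these snippets; a plausible stand-in for making arbitrary strings safe inside XML text nodes is the standard library's escape function:

from xml.sax.saxutils import escape

# escapes &, < and > so a pathological command line cannot break the XML
print escape('xia2 pipeline=dials image=/data/x_0001.cbf && echo <done>')
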
Exemplo n.º 25
0
    # and the summary file
    summary_records = xinfo.summarise()

    fout = open('xia2-summary.dat', 'w')
    for record in summary_records:
      fout.write('%s\n' % record)
    fout.close()

    from xia2.command_line.html import generate_xia2_html
    generate_xia2_html(xinfo, filename='xia2.html')

  write_citations()

  xinfo.as_json(filename='xia2.json')

  Environment.cleanup()

  return

def run():
  from libtbx.utils import Sorry
  if len(sys.argv) < 2 or '-help' in sys.argv or '--help' in sys.argv:
    help()
    sys.exit()

  try:
    check_environment()
  except Exception as e:
    traceback.print_exc(file=open('xia2.error', 'w'))
    Chatter.write('Status: error "%s"' % str(e))
Exemplo n.º 26
0
File: XSweep.py Project: xia2/xia2
  def _get_integrater(self):
    '''Get my integrater, and if it is not set, create one.'''

    if self._integrater is None:

      # set the working directory for this, based on the hierarchy
      # defined herein...

      # that would be CRYSTAL_ID/WAVELENGTH/SWEEP/index &c.

      if not self.get_wavelength():
        wavelength_id = "default"
        crystal_id = "default"
        project_id = "default"

      else:
        wavelength_id = self.get_wavelength().get_name()
        crystal_id = self.get_wavelength().get_crystal().get_name()
        project_id = self.get_wavelength().get_crystal().get_project().get_name()

      working_directory = Environment.generate_directory(
        [crystal_id, wavelength_id, self.get_name(), 'integrate'])

      self._integrater = IntegraterFactory.IntegraterForXSweep(self)

      # configure the integrater with the indexer - unless
      # we don't want to...

      self._integrater.set_integrater_refiner(self._get_refiner())

      Debug.write('Integrater / refiner / indexer for sweep %s: %s/%s/%s' % \
                  (self._name, self._integrater.__class__.__name__,
                   self._get_refiner().__class__.__name__,
                   self._get_indexer().__class__.__name__))

      # or if we have been told this on the command-line -
      # N.B. should really add a mechanism to specify the ice
      # rings we want removing, #1317.

      if PhilIndex.params.xia2.settings.integration.exclude_ice_regions:
        Debug.write('Ice ring region exclusion ON')
        self._integrater.set_integrater_ice(True)

      # or if we were told about ice or specific excluded resolution
      # ranges via the xinfo file
      if self._ice:
        self._integrater.set_integrater_ice(self._ice)

      if self._excluded_regions:
        self._integrater.set_integrater_excluded_regions(
            self._excluded_regions)

      self._integrater.set_integrater_project_info(
        project_id, crystal_id, wavelength_id)

      self._integrater.set_integrater_sweep_name(self._name)

      # copy across anomalous flags in case it's useful - #871

      self._integrater.set_integrater_anomalous(
          self.get_wavelength().get_crystal().get_anomalous())

      # see if we have any useful detector parameters to pass on

      if self.get_gain():
        self._integrater.set_gain(self.get_gain())

      if self.get_polarization():
        self._integrater.set_polarization(self.get_polarization())

      # look to see if there are any global integration parameters
      # we can set...

      if global_integration_parameters.get_parameters(crystal_id):
        Debug.write('Using integration parameters for crystal %s' \
                    % crystal_id)
        self._integrater.set_integrater_parameters(
            global_integration_parameters.get_parameters(crystal_id))

      # frames to process...

      if self._frames_to_process:
        self._integrater._setup_from_imageset(self.get_imageset())
        #frames = self._frames_to_process
        #self._integrater.set_integrater_wedge(frames[0],
                                              #frames[1])
        #self._integrater.set_frame_wedge(frames[0],
                                         #frames[1])
        self._integrater.set_integrater_epoch(
          self.get_epoch(self._frames_to_process[0]))

      self._integrater.set_working_directory(working_directory)

    return self._integrater
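
As the snippet shows, each sweep's integrater is given its own CRYSTAL_ID/WAVELENGTH/SWEEP/integrate working directory. A hedged sketch of that call, assuming (as its use above suggests) that generate_directory creates the nested directory if absent and returns its path; the identifiers are illustrative:

from xia2.Handlers.Environment import Environment

working_directory = Environment.generate_directory(
    ['DEFAULT', 'NATIVE', 'SWEEP1', 'integrate'])
print working_directory
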
Exemplo n.º 27
0
    def _get_integrater(self):
        """Get my integrater, and if it is not set, create one."""

        if self._integrater is None:

            # set the working directory for this, based on the hierarchy
            # defined herein...

            # that would be CRYSTAL_ID/WAVELENGTH/SWEEP/index &c.

            if not self.get_wavelength():
                wavelength_id = "default"
                crystal_id = "default"
                project_id = "default"

            else:
                wavelength_id = self.get_wavelength().get_name()
                crystal_id = self.get_wavelength().get_crystal().get_name()
                project_id = (
                    self.get_wavelength().get_crystal().get_project().get_name())

            working_directory = Environment.generate_directory(
                [crystal_id, wavelength_id,
                 self.get_name(), "integrate"])

            self._integrater = IntegraterFactory.IntegraterForXSweep(self)

            # configure the integrater with the indexer - unless
            # we don't want to...

            self._integrater.set_integrater_refiner(self._get_refiner())

            Debug.write(
                "Integrater / refiner / indexer for sweep %s: %s/%s/%s" % (
                    self._name,
                    self._integrater.__class__.__name__,
                    self._get_refiner().__class__.__name__,
                    self._get_indexer().__class__.__name__,
                ))

            # or if we have been told this on the command-line -
            # N.B. should really add a mechanism to specify the ice
            # rings we want removing, #1317.

            if PhilIndex.params.xia2.settings.integration.exclude_ice_regions:
                Debug.write("Ice ring region exclusion ON")
                self._integrater.set_integrater_ice(True)

            # or if we were told about ice or specific excluded resolution
            # ranges via the xinfo file
            if self._ice:
                self._integrater.set_integrater_ice(self._ice)

            if self._excluded_regions:
                self._integrater.set_integrater_excluded_regions(
                    self._excluded_regions)

            self._integrater.set_integrater_project_info(
                project_id, crystal_id, wavelength_id)

            self._integrater.set_integrater_sweep_name(self._name)

            # copy across anomalous flags in case it's useful - #871

            self._integrater.set_integrater_anomalous(
                self.get_wavelength().get_crystal().get_anomalous())

            # see if we have any useful detector parameters to pass on

            if self.get_gain():
                self._integrater.set_gain(self.get_gain())

            if self.get_polarization():
                self._integrater.set_polarization(self.get_polarization())

            # look to see if there are any global integration parameters
            # we can set...

            if global_integration_parameters.get_parameters(crystal_id):
                Debug.write("Using integration parameters for crystal %s" %
                            crystal_id)
                self._integrater.set_integrater_parameters(
                    global_integration_parameters.get_parameters(crystal_id))

            # frames to process...

            if self._frames_to_process:
                self._integrater._setup_from_imageset(self.get_imageset())
                # frames = self._frames_to_process
                # self._integrater.set_integrater_wedge(frames[0],
                # frames[1])
                # self._integrater.set_frame_wedge(frames[0],
                # frames[1])
                self._integrater.set_integrater_epoch(
                    self.get_epoch(self._frames_to_process[0]))

            self._integrater.set_working_directory(working_directory)

        return self._integrater