Example #1
  def _updated_aimless(self):
    '''Generate a correctly configured Aimless...'''

    aimless = None

    if not self._scalr_corrections:
      aimless = self._factory.Aimless()
    else:

      aimless = self._factory.Aimless(
          partiality_correction = self._scalr_correct_partiality,
          absorption_correction = self._scalr_correct_absorption,
          decay_correction = self._scalr_correct_decay)

    if Flags.get_microcrystal():

      # fiddly little data sets - allow more rapid scaling...

      aimless.set_scaling_parameters('rotation', 2.0)
      if self._scalr_correct_decay:
        aimless.set_bfactor(bfactor=True, brotation = 2.0)

    if Flags.get_small_molecule():
      aimless.set_scaling_parameters('rotation', 15.0)
      aimless.set_bfactor(bfactor=False)

    aimless.set_surface_tie(PhilIndex.params.ccp4.aimless.surface_tie)
    aimless.set_surface_link(PhilIndex.params.ccp4.aimless.surface_link)

    return aimless
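
The microcrystal and small-molecule branches above only change the rotation spacing of the scaling model and whether a B factor is refined. A minimal standalone sketch of that decision, assuming a hypothetical ScalingOptions container in place of the real Aimless wrapper (the numeric values are the ones used above):

from collections import namedtuple

# Hypothetical stand-in for the Aimless wrapper settings; only the
# parameter values are taken from _updated_aimless above.
ScalingOptions = namedtuple('ScalingOptions',
                            ['rotation_spacing', 'bfactor', 'brotation'])

def select_scaling_options(microcrystal, small_molecule, correct_decay=True):
  '''Mirror the flag-dependent scaling settings from _updated_aimless.'''
  if small_molecule:
    # coarse scaling, no B factor, for small-molecule data
    return ScalingOptions(rotation_spacing=15.0, bfactor=False,
                          brotation=None)
  if microcrystal:
    # fiddly little data sets - allow more rapid scaling
    return ScalingOptions(rotation_spacing=2.0, bfactor=correct_decay,
                          brotation=2.0 if correct_decay else None)
  # otherwise leave the Aimless defaults untouched
  return ScalingOptions(rotation_spacing=None, bfactor=None, brotation=None)
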
Example #2
def _prepare_pointless_hklin(working_directory,
                             hklin,
                             phi_width):
  '''Prepare some data for pointless - this will take only 180 degrees
  of data if there is more than this (through a "rebatch" command),
  else will simply return hklin.'''

  # also remove blank images?

  if not Flags.get_microcrystal() and not Flags.get_small_molecule():

    Debug.write('Excluding blank images')

    hklout = os.path.join(
        working_directory,
        '%s_noblank.mtz' % (os.path.split(hklin)[-1][:-4]))

    FileHandler.record_temporary_file(hklout)

    hklin = remove_blank(hklin, hklout)

  # find the number of batches

  md = Mtzdump()
  md.set_working_directory(working_directory)
  auto_logfiler(md)
  md.set_hklin(hklin)
  md.dump()

  batches = max(md.get_batches()) - min(md.get_batches())

  phi_limit = 180

  if batches * phi_width < phi_limit or Flags.get_small_molecule():
    return hklin

  hklout = os.path.join(
      working_directory,
      '%s_prepointless.mtz' % (os.path.split(hklin)[-1][:-4]))

  rb = Rebatch()
  rb.set_working_directory(working_directory)
  auto_logfiler(rb)
  rb.set_hklin(hklin)
  rb.set_hklout(hklout)

  first = min(md.get_batches())
  last = first + int(phi_limit / phi_width)

  Debug.write('Preparing data for pointless - %d batches (%d degrees)' % \
              ((last - first), phi_limit))

  rb.limit_batches(first, last)

  # we will want to delete this on exit
  FileHandler.record_temporary_file(hklout)

  return hklout
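
The 180-degree cut above is simple arithmetic on the batch range and the oscillation width: if the sweep covers less than phi_limit degrees (or the small-molecule flag is set) the input is used unmodified, otherwise only the first phi_limit / phi_width batches are kept. A self-contained sketch of that decision; pointless_batch_window is a hypothetical helper name:

def pointless_batch_window(batches, phi_width, small_molecule=False,
                           phi_limit=180):
  '''Return the (first, last) batch range to keep for pointless, or None
  if the input file can be used unmodified. batches is an iterable of
  batch numbers, e.g. from Mtzdump.'''
  first = min(batches)
  n_batches = max(batches) - first
  if n_batches * phi_width < phi_limit or small_molecule:
    return None  # less than 180 degrees of data - take everything
  last = first + int(phi_limit / phi_width)
  return first, last

# e.g. 3600 images of 0.1 degrees starting at batch 1 -> (1, 1801)
assert pointless_batch_window(range(1, 3601), 0.1) == (1, 1801)
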
Example #3
  def _index_prepare(self):

    if self._indxr_images == []:
      self._index_select_images()

    if self._mosflm_autoindex_thresh is None and \
           Flags.get_microcrystal():
      self._mosflm_autoindex_thresh = 5

    return
Example #4
  def _index_select_images(self):
    '''Select correct images based on image headers.'''

    if Flags.get_small_molecule():
      return self._index_select_images_small_molecule()

    if Flags.get_microcrystal():
      return self._index_select_images_microcrystal()

    phi_width = self.get_phi_width()
    images = self.get_matching_images()

    if Flags.get_interactive():
      selected_images = index_select_images_user(phi_width, images,
                                                 Chatter)
    else:
      selected_images = index_select_images_lone(phi_width, images)

    for image in selected_images:
      Debug.write('Selected image %s' % image)
      self.add_indexer_image_wedge(image)

    return
Example #5
    def run(self):
      '''Run colspot.'''

      #image_header = self.get_header()

      ## crank through the header dictionary and replace incorrect
      ## information with updated values through the indexer
      ## interface if available...

      ## need to add distance, wavelength - that should be enough...

      #if self.get_distance():
        #image_header['distance'] = self.get_distance()

      #if self.get_wavelength():
        #image_header['wavelength'] = self.get_wavelength()

      #if self.get_two_theta():
        #image_header['two_theta'] = self.get_two_theta()

      header = imageset_to_xds(self.get_imageset())

      xds_inp = open(os.path.join(self.get_working_directory(),
                                  'XDS.INP'), 'w')

      # what are we doing?
      xds_inp.write('JOB=COLSPOT\n')
      xds_inp.write('MAXIMUM_NUMBER_OF_PROCESSORS=%d\n' % \
                    self._parallel)

      #if image_header['detector'] in ('pilatus', 'dectris'):
      if self.get_imageset().get_detector()[0].get_type() == 'SENSOR_PAD':
        xds_inp.write('MINIMUM_NUMBER_OF_PIXELS_IN_A_SPOT=%d\n' %
                      self._params.minimum_pixels_per_spot)

      for record in header:
        xds_inp.write('%s\n' % record)

      name_template = os.path.join(self.get_directory(),
                                   self.get_template().replace('#', '?'))

      record = 'NAME_TEMPLATE_OF_DATA_FRAMES=%s\n' % \
               name_template

      xds_inp.write(record)

      xds_inp.write('DATA_RANGE=%d %d\n' % self._data_range)
      for spot_range in self._spot_range:
        xds_inp.write('SPOT_RANGE=%d %d\n' % spot_range)
      xds_inp.write('BACKGROUND_RANGE=%d %d\n' % \
                    self._background_range)

      # microcrystals have very small spots, perhaps?

      if Flags.get_microcrystal():
        xds_inp.write('MINIMUM_NUMBER_OF_PIXELS_IN_A_SPOT=1\n')

      if Flags.get_small_molecule():
        xds_inp.write('STRONG_PIXEL=5\n')
        # FIXME should probably be moved to a phil parameter

      xds_inp.close()

      # copy the input file...
      shutil.copyfile(os.path.join(self.get_working_directory(),
                                   'XDS.INP'),
                      os.path.join(self.get_working_directory(),
                                   '%d_COLSPOT.INP' % self.get_xpid()))

      # write the input data files...

      for file_name in self._input_data_files_list:
        src = self._input_data_files[file_name]
        dst = os.path.join(
            self.get_working_directory(), file_name)
        if src != dst:
          shutil.copyfile(src, dst)

      self.start()
      self.close_wait()

      xds_check_version_supported(self.get_all_output())

      # copy the LP file
      shutil.copyfile(os.path.join(self.get_working_directory(),
                                   'COLSPOT.LP'),
                      os.path.join(self.get_working_directory(),
                                   '%d_COLSPOT.LP' % self.get_xpid()))

      # gather the output files

      for file in self._output_data_files_list:
        self._output_data_files[file] = os.path.join(
          self.get_working_directory(), file)

      return
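
Only two of the COLSPOT keywords written above depend on the processing flags (plus one on the detector type). A sketch of just those extra keyword lines; extra_colspot_keywords is a hypothetical helper and minimum_pixels_per_spot stands in for the phil parameter used above:

def extra_colspot_keywords(pad_detector, microcrystal, small_molecule,
                           minimum_pixels_per_spot):
  '''Return the flag-dependent keyword lines written to XDS.INP above.'''
  lines = []
  if pad_detector:
    # pixel-array (SENSOR_PAD) detectors get an explicit spot-size minimum
    lines.append('MINIMUM_NUMBER_OF_PIXELS_IN_A_SPOT=%d' %
                 minimum_pixels_per_spot)
  if microcrystal:
    # microcrystals have very small spots, perhaps?
    lines.append('MINIMUM_NUMBER_OF_PIXELS_IN_A_SPOT=1')
  if small_molecule:
    lines.append('STRONG_PIXEL=5')
  return lines

# e.g. microcrystal data collected on a pixel-array detector
print(extra_colspot_keywords(True, True, False, 3))
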
Example #6
    def run(self):
      '''Actually run XSCALE.'''

      self._write_xscale_inp()

      # copy the input file...
      shutil.copyfile(os.path.join(self.get_working_directory(),
                                   'XSCALE.INP'),
                      os.path.join(self.get_working_directory(),
                                   '%d_XSCALE.INP' % self.get_xpid()))

      self.start()
      self.close_wait()

      # copy the LP file
      shutil.copyfile(os.path.join(self.get_working_directory(),
                                   'XSCALE.LP'),
                      os.path.join(self.get_working_directory(),
                                   '%d_XSCALE.LP' % self.get_xpid()))

      # now look at XSCALE.LP
      xds_check_error(self.get_all_output())

      dname = None

      # get the outlier reflections... and the overall scale factor
      for line in open(os.path.join(
          self.get_working_directory(),
          'XSCALE.LP'), 'r').readlines():
        if '"alien"' in line:
          h, k, l = tuple(map(int, line.split()[:3]))
          z = float(line.split()[4])
          if not (h, k, l, z) in self._remove:
            self._remove.append((h, k, l, z))

        if 'FACTOR TO PLACE ALL DATA SETS TO ' in line:
          self._scale_factor = float(line.split()[-1])

        if 'STATISTICS OF SCALED OUTPUT DATA SET' in line:
          dname = line.split()[-1].replace('.HKL', '')

        if 'total' in line and not dname in self._rmerges:
          if len(line.split()) > 5:
            self._rmerges[dname] = float(
                line.replace('%', '').split()[5])

        # trac #419 - if the data sets are not correctly indexed,
        # throw an exception. N.B. this will only work if the
        # data sets are moderately complete (i.e. there are more
        # than a handful of common reflections) - which may not be
        # the case in MULTICRYSTAL mode.

        if ' !!! WARNING !!! ' in line and \
               'CORRELATION FACTORS ARE DANGEROUSLY SMALL' in line:
          groups = get_correlation_coefficients_and_group(
              os.path.join(self.get_working_directory(),
                           'XSCALE.LP'))
          Debug.write('Low correlations - check data sets')
          for j, name in enumerate(groups):
            Debug.write('Group %d' % j)
            for file_name in groups[name]:
              Debug.write(file_name)

          if not Flags.get_microcrystal():
            raise RuntimeError, 'reindexing error: %s' % \
                  os.path.join(self.get_working_directory(),
                               'XSCALE.LP')

      return
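
The XSCALE.LP parsing above keys off literal marker strings. A hedged sketch of the outlier ("alien") handling on a fabricated line, laid out only to match the column indices sliced above (h, k, l in the first three fields, the z-score in the fifth); it is not a real XSCALE.LP excerpt:

def parse_alien(line):
  '''Extract (h, k, l, z) from an "alien" outlier line as sliced above.'''
  tokens = line.split()
  h, k, l = map(int, tokens[:3])
  z = float(tokens[4])
  return (h, k, l, z)

# fabricated example line in the assumed column layout
example = ' -3  7  12  1234.5   8.91  "alien"'
remove = []
if '"alien"' in example:
  outlier = parse_alien(example)
  if outlier not in remove:
    remove.append(outlier)
print(remove)   # [(-3, 7, 12, 8.91)]
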
Example #7
    def run(self):
      '''Run mosflm integration.'''

      assert self._space_group_number is not None

      summary_file = 'summary_%s.log' %self._space_group_number
      self.add_command_line('SUMMARY')
      self.add_command_line(summary_file)

      self.start()

      if not self._refine_profiles:
        self.input('profile nooptimise')

      if [self._pname, self._xname, self._dname].count(None) == 0:
        self.input('harvest on')
        self.input('pname %s' %self._pname)
        self.input('xname %s' %self._xname)
        self.input('dname %s' %self._dname)

      if self._reverse_phi:
        self.input('detector reversephi')

      assert self._template is not None and self._directory is not None
      self.input('template "%s"' %self._template)
      self.input('directory "%s"' %self._directory)

      if self._exclude_ice:
        for record in open(os.path.abspath(os.path.join(
            os.path.dirname(__file__), '..', '..',
            'Data', 'ice-rings.dat'))).readlines():
          resol = tuple(map(float, record.split()[:2]))
          self.input('resolution exclude %.2f %.2f' % (resol))

      if self._exclude_regions is not None:
        for upper, lower in self._exclude_regions:
          self.input('resolution exclude %.2f %.2f' % (upper, lower))

      for instruction in self._instructions:
        self.input(instruction)

      self.input('matrix %s' %self._input_mat_file)

      assert self._beam_centre is not None
      assert self._distance is not None
      assert self._mosaic is not None
      self.input('beam %f %f' %tuple(self._beam_centre))
      self.input('distance %f' %self._distance)
      self.input('mosaic %f' %self._mosaic)
      if self._unit_cell is not None:
        self.input('cell %f %f %f %f %f %f' %self._unit_cell)

      self.input('refinement include partials')

      if self._wavelength is not None:
        self.input('wavelength %f' %self._wavelength)

      if len(self._parameters):
        for p, v in self._parameters.items():
          self.input('%s %s' % (p, str(v)))

      self.input('symmetry %d' %self._space_group_number)

      if self._gain is not None:
        self.input('gain %5.2f' %self._gain)

      # check for resolution limits
      if self._d_min is not None:
        if self._d_max is not None:
          self.input('resolution %f %f' %(self._d_min, self._d_max))
        else:
          self.input('resolution %f' %self._d_min)

      if self._mask is not None:
        record = 'limits quad'
        for m in self._mask:
          record += ' %.1f %.1f' % m
        self.input(record)

      # set up the integration
      self.input('postref fix all')
      self.input('postref maxresidual 5.0')

      if self._lim_x is not None and self._lim_y is not None:
        self.input('limits xscan %f yscan %f' % (self._lim_x, self._lim_y))

      if self._fix_mosaic:
        self.input('postref fix mosaic')

      #self.input('separation close')

      ## XXX FIXME this is a horrible hack - I at least need to
      ## sand box this ...
      #if self.get_header_item('detector') == 'raxis':
        #self.input('adcoffset 0')

      genfile = os.path.join(os.environ['CCP4_SCR'],
                             '%d_mosflm.gen' % self.get_xpid())

      self.input('genfile %s' % genfile)

      # add an extra chunk of orientation refinement

      # XXX FIXME
      from xia2.Handlers.Flags import Flags
      if Flags.get_microcrystal():
        a = self._image_range[0]
        if self._image_range[1] - self._image_range[0] > 20:
          b = a + 20
        else:
          b = self._image_range[1]

        self.input('postref segment 1 fix all')
        self.input('process %d %d' % (a, b))
        self.input('go')
        self.input('postref nosegment')

        self.input('separation close')
        self.input('process %d %d block %d' % \
                   (self._image_range[0],
                    self._image_range[1],
                    1 + self._image_range[1] - self._image_range[0]))

      else:
        if self._pre_refinement:
          a, b = self._image_range

          if b - a > 3:
            b = a + 3

          self.input('postref multi segments 1')
          self.input('process %d %d' % (a, b))
          self.input('go')

          self.input('postref nosegment')

          if self._fix_mosaic:
            self.input('postref fix mosaic')

        self.input('separation close')
        self.input(
          'process %d %d' %(self._image_range[0], self._image_range[1]))

      self.input('go')

      # that should be everything
      self.close_wait()

      # get the log file
      output = self.get_all_output()

      integrated_images_first = 1.0e6
      integrated_images_last = -1.0e6

      # look for major errors

      for i in range(len(output)):
        o = output[i]
        if 'LWBAT: error in ccp4_lwbat' in o:
          raise RuntimeError, 'serious mosflm error - inspect %s' % \
                self.get_log_file()

      mosaics = []

      for i in range(len(output)):
        o = output[i]

        if 'Integrating Image' in o:
          batch = int(o.split()[2])
          if batch < integrated_images_first:
            integrated_images_first = batch
          if batch > integrated_images_last:
            integrated_images_last = batch

        if 'Smoothed value for refined mosaic' in o:
          mosaics.append(float(o.split()[-1]))

        if 'ERROR IN DETECTOR GAIN' in o:

          self._detector_gain_error = True

          # look for the correct gain
          for j in range(i, i + 10):
            if output[j].split()[:2] == ['set', 'to']:
              gain = float(output[j].split()[-1][:-1])

              # check that this is not the input
              # value... Bug # 3374

              if self._gain:

                if math.fabs(gain - self._gain) > 0.02:
                  self._suggested_gain = gain

              else:
                self._suggested_gain = gain

        # FIXME if mosaic spread refines to a negative value
        # once the lattice has passed the triclinic postrefinement
        # test then fix this by setting "POSTREF FIX MOSAIC" and
        # restarting.

        if 'Smoothed value for refined mosaic spread' in o:
          mosaic = float(o.split()[-1])
          if mosaic < 0.0:
            raise IntegrationError, 'negative mosaic spread'

        if 'WRITTEN OUTPUT MTZ FILE' in o:
          self._mosflm_hklout = os.path.join(
              self.get_working_directory(),
              output[i + 1].split()[-1])

        if 'Number of Reflections' in o:
          self._nref = int(o.split()[-1])

        # if a BGSIG error happened try not refining the
        # profile and running again...

        if 'BGSIG too large' in o:
          self._bgsig_too_large = True

        if 'An unrecoverable error has occurred in GETPROF' in o:
          self._getprof_error = True

        if 'MOSFLM HAS TERMINATED EARLY' in o:
          raise RuntimeError, \
                'integration failed: reason unknown (log %s)' % \
                self.get_log_file()

      if not self._mosflm_hklout:
        raise RuntimeError, 'processing abandoned'

      self._batches_out = (integrated_images_first, integrated_images_last)

      self._mosaic_spreads = mosaics

      self._per_image_statistics = _parse_mosflm_integration_output(output)

      # inspect the output for e.g. very high weighted residuals

      images = self._per_image_statistics.keys()
      images.sort()

      # FIXME bug 2175 this should probably look at the distribution
      # of values rather than the peak, since this is probably a better
      # diagnostic of a poor lattice.

      residuals = []
      for i in images:
        if self._per_image_statistics[i].has_key('weighted_residual'):
          residuals.append(self._per_image_statistics[i]['weighted_residual'])

      self._residuals = residuals

      try:
        self._postref_result = _parse_summary_file(
          os.path.join(self.get_working_directory(), summary_file))
      except AssertionError, e:
        self._postref_result = { }
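
The microcrystal branch above first post-refines over at most 20 images with all parameters fixed, then integrates the whole sweep as a single block; the normal branch pre-refines over at most 3 images. A small sketch of just that image-range arithmetic, with postref_range and integration_block_size as hypothetical helper names:

def postref_range(image_range, microcrystal):
  '''First/last image for the initial postrefinement pass, mirroring the
  20-image (microcrystal) or 3-image (normal) windows used above.'''
  a, b = image_range
  width = 20 if microcrystal else 3
  if b - a > width:
    b = a + width
  return a, b

def integration_block_size(image_range):
  '''Block size that makes the whole sweep a single block (microcrystal).'''
  return 1 + image_range[1] - image_range[0]

# e.g. a 90-image microcrystal sweep
print(postref_range((1, 90), microcrystal=True))   # (1, 21)
print(integration_block_size((1, 90)))             # 90
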