Example #1
def run(args):
  import os
  to_pickle = "--pickle" in args
  for file_name in args:
    if (file_name.startswith("--")): continue
    print file_name + ":"
    f = open(file_name, "r")
    t0 = os.times()
    reflection_file = cns_reflection_file(f)
    tn = os.times()
    t_parse = tn[0]+tn[1]-t0[0]-t0[1]
    f.close()
    reflection_file.show_summary()
    print
    crystal_symmetry = crystal.symmetry((), "P 1")
    miller_arrays = reflection_file.as_miller_arrays(crystal_symmetry)
    for miller_array in miller_arrays:
      miller_array.show_summary()
      print
    if (to_pickle):
      pickle_file_name = os.path.split(file_name)[1] + ".pickle"
      t0 = os.times()
      easy_pickle.dump(pickle_file_name, reflection_file)
      tn = os.times()
      t_dump = tn[0]+tn[1]-t0[0]-t0[1]
      t0 = os.times()
      easy_pickle.load(pickle_file_name)
      tn = os.times()
      t_load = tn[0]+tn[1]-t0[0]-t0[1]
      print "parse: %.2f, dump: %.2f, load: %.2f" % (t_parse, t_dump, t_load)
    print
  t = os.times()
  print "u+s,u,s: %.2f %.2f %.2f" % (t[0] + t[1], t[0], t[1])
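
A note on the timing pattern above: os.times() returns (user, system, ...) CPU times, so summing the first two entries before and after a phase charges user+system CPU seconds to that phase. A minimal standalone sketch of the same idea (cpu_seconds is an illustrative helper, not from the source):

import os

def cpu_seconds():
  # user + system CPU time consumed by this process so far
  t = os.times()
  return t[0] + t[1]

t0 = cpu_seconds()
sum(i * i for i in range(10 ** 6))  # stand-in for the parsing work
print("parse: %.2f s CPU" % (cpu_seconds() - t0))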
Example #2
 def __init__(self,
              init,
              iterable,
              input_type='image'):
   self.init = ep.load(init)
   self.iterable = ep.load(iterable)
   self.type = input_type
Example #3
 def __init__ (self, dir_name) :
   self.dir_name = dir_name
   self._distl = None
   self._labelit = None
   self._groups = None
   distl_file = os.path.join(dir_name, "DISTL_pickle")
   labelit_file = os.path.join(dir_name, "LABELIT_pickle")
   groups_file = os.path.join(dir_name, "LABELIT_possible")
   if (os.path.exists(distl_file)) :
     self._distl = easy_pickle.load(distl_file)
   if (os.path.exists(labelit_file)) :
     self._labelit = easy_pickle.load(labelit_file)
   if (os.path.exists(groups_file)) :
     self._groups = easy_pickle.load(groups_file)
Example #4
  def analyze_prior_results(self, analysis_source):
    """ Runs analysis of previous grid search / integration results, used in an
        analyze-only mode """

    from prime.iota.iota_analysis import Analyzer
    from libtbx import easy_pickle as ep

    if os.path.isdir(analysis_source):
      int_folder = os.path.abspath(analysis_source)
    else:
      try:
        int_folder = os.path.abspath(os.path.join(os.curdir,
                     'integration/{}/image_objects'.format(analysis_source)))
      except ValueError:
        print 'Run #{} not found'.format(analysis_source)
        return

    if os.path.isdir(int_folder):
      int_list = [os.path.join(int_folder, i) for i in os.listdir(int_folder)]
      img_objects = [ep.load(i) for i in int_list]

      analysis = Analyzer(img_objects, None, self.iver, self.now)
      analysis.print_results()
      analysis.unit_cell_analysis(self.params.analysis.cluster_threshold,
                                  int_folder, False)
      analysis.print_summary(None)
      analysis.show_heatmap()
    else:
      print 'No results found in {}'.format(int_folder)
Example #5
def exercise_02():
  file = libtbx.env.find_in_repositories(relative_path=
    "chem_data/polygon_data/all_mvd.pickle", test=os.path.isfile)
  database_dict = easy_pickle.load(file)
  #
  r_work_pdb = database_dict["pdb_header_r_work"]
  r_work_cutoff = database_dict["r_work_cutoffs"]
  r_work_re_computed = database_dict["r_work"]
  name = database_dict["pdb_code"]
  print "size: ",name.size()
  #
  sel = r_work_pdb != "none"
  print "NO R in PDB:", sel.count(False)
  #
  r_work_pdb = r_work_pdb.select(sel)
  r_work_cutoff = r_work_cutoff.select(sel)
  r_work_re_computed = r_work_re_computed.select(sel)
  name = name.select(sel)
  #
  def str_to_float(x):
    tmp = flex.double()
    for x_ in x:
      tmp.append(float(x_))
    return tmp
  #
  r_work_cutoff = str_to_float(r_work_cutoff)
  r_work_re_computed = str_to_float(r_work_re_computed)
  r_work_pdb = str_to_float(r_work_pdb)
  #
  delta1 = (r_work_cutoff - r_work_pdb)*100.
  delta2 = (r_work_re_computed - r_work_pdb)*100.
  print "Number with better R: ", (delta1 <=0.).count(True), (delta2 <=0.).count(True)
  print "Number with worse  R: ", (delta1 >0.).count(True), (delta2 >0.).count(True)
Example #6
  def __init__(self, pickle, img_location, pixel_size, proceed_without_image=False):
    # unpickle pickle file and keep track of image location
    if img_location is None:
      if proceed_without_image:
        self.img_location = []
      else:
        raise Sorry("No image found at specified location. Override by setting "
          "proceed_without_image=True to produce experiment lists that may only "
          "be read when check_format is False.")
    else:
      self.img_location = [img_location]

    # The pickle argument may be a file path or an already-unpickled object. If
    # it is already unpickled, easy_pickle.load will fail with an AttributeError;
    # a truncated or empty file will fail with an EOFError.
    try:
      self.data = easy_pickle.load(pickle)
    except EOFError:
      self.data = None
      self.pickle = None
    except AttributeError:
      self.data = pickle
      self.pickle = None
    else:
      self.pickle = pickle
    if self.data is not None:
      self.length = len(self.data['observations'][0].data())
      self.pixel_size = pixel_size
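
The try/except above dispatches on what was passed in: a path is loaded normally, an already-unpickled object makes easy_pickle.load fail with AttributeError, and a truncated file fails with EOFError. A minimal sketch of the same dispatch idea using the plain pickle module (illustrative names; plain pickle raises TypeError rather than AttributeError when handed a non-path):

import pickle

def load_or_passthrough(obj_or_path):
  # Accept either a path to a pickle file or an already-loaded object.
  try:
    with open(obj_or_path, "rb") as f:
      return pickle.load(f)
  except (TypeError, AttributeError):
    return obj_or_path  # not a path: assume already unpickled
  except EOFError:
    return None         # truncated or empty pickle file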
Example #7
def _get_classifier(svm_name=None):
  """
  Initializes (if need be) and then returns a classifier trained to
  differentiate between different ions and water. Also returns a set of
  options for gathering features.

  To use the classifier, you will need to pass it to
  svm.libsvm.svm_predict_probability. Ion prediction is already encapsulated by
  predict_ion, so most users should just call that.

  Parameters
  ----------
  svm_name : str, optional
      The SVM to use for prediction. By default, the SVM trained on heavy atoms
      and calcium in the presence of anomalous data is used. See
      chem_data/classifiers for a full list of SVMs available.

  Returns
  -------
  svm.svm_model
      The libsvm classifier used to predict the identities of ion sites.
  dict of str, bool
      Options to pass to ion_vector when collecting features about these sites.
  tuple of ((tuple of numpy.array of float, numpy.array of float),
            tuple of float)
      The scaling parameters passed to scale_to.
  numpy.array of bool
      The features of the vector that were selected as important for
      classification. Useful both for asserting that ion_vector is returning
      something of the correct size and for selecting only the features that
      actually affect classification.

  See Also
  --------
  svm.libsvm.svm_predict_probability
  mmtbx.ions.svm.predict_ion
  phenix_dev.ion_identification.nader_ml.ions_train_svms
  """
  assert (svmutil is not None)
  global _CLASSIFIER, _CLASSIFIER_OPTIONS

  if not svm_name or str(svm_name) == "Auto" :
    svm_name = _DEFAULT_SVM_NAME

  if svm_name not in _CLASSIFIER:
    svm_path = os.path.join(CLASSIFIERS_PATH, "{}.model".format(svm_name))
    options_path = os.path.join(CLASSIFIERS_PATH,
                                "{}_options.pkl".format(svm_name))
    try:
      _CLASSIFIER[svm_name] = svmutil.svm_load_model(svm_path)
      _CLASSIFIER_OPTIONS[svm_name] = load(options_path)
    except IOError as err:
      if err.errno != errno.ENOENT:
        raise
      _CLASSIFIER[svm_name] = None
      _CLASSIFIER_OPTIONS[svm_name] = (None, None, None)

  vector_options, scaling, features = _CLASSIFIER_OPTIONS[svm_name]
  return _CLASSIFIER[svm_name], vector_options, scaling, features
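
As the docstring says, the returned model is meant for svm.libsvm.svm_predict_probability (predict_ion wraps this for most users). A generic, hedged sketch of probability prediction with the external svmutil API, independent of the mmtbx wrappers:

from svmutil import svm_train, svm_predict

# Toy 1-D problem: train with probability estimates enabled (-b 1).
y = [0, 0, 1, 1]
x = [{1: 0.0}, {1: 0.1}, {1: 0.9}, {1: 1.0}]
model = svm_train(y, x, '-b 1 -q')
labels, acc, probs = svm_predict([0], [{1: 0.2}], model, '-b 1')
print("label %d, probabilities %s" % (labels[0], probs[0]))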
Example #8
def exercise_00():
  file = libtbx.env.find_in_repositories(relative_path=
    "chem_data/polygon_data/all_mvd.pickle", test=os.path.isfile)
  database_dict = easy_pickle.load(file)
  #
  i_1l2h = list(database_dict["pdb_code"]).index("1l2h")
  found = 0
  for key in database_dict.keys():
    #print key, ": ",database_dict[key][i_1l2h]
    #
    value = database_dict[key][i_1l2h]
    if(key == "unit_cell"):
      assert "(53.89, 53.89, 77.36, 90, 90, 90)" == value.strip()
      found += 1
    if(key == "high_resolution"):
      assert approx_equal(1.54, float(value))
      found += 1
    if(key == "pdb_header_tls"):
      assert "false (n_groups: 0)" == value.strip()
      found += 1
    if(key == "test_set_size"):
      assert approx_equal(0.0476, float(value), 0.002)
      found += 1
    if(key == "twinned"):
      assert "h,-k,-l" == value.strip()
      found += 1
  #
  assert found == 5
Example #9
  def __init__(self, scratch_dir):
    pickle_dirname = "pickle"
    pickle_basename = "pkl_"
    self.sum_img = None
    self.sumsq_img = None
    self.nmemb = 0
    path_pattern = "%s/%s/%ss[0-9][0-9]-[0-9].pickle" %(
      scratch_dir, pickle_dirname, pickle_basename)
    g = glob.glob(path_pattern)
    if len(g) == 0:
      print "No files found matching pattern: %s" %path_pattern
      return
    for path in g:
      try:
        d = easy_pickle.load(file_name=path)
      except EOFError:
        print "EOFError: skipping %s:" %path
        continue
      if self.sum_img is None:
        self.sum_img = d["sum_img"]
        self.sumsq_img = d["sumsq_img"]
      else:
        self.sum_img += d["sum_img"]
        self.sumsq_img += d["sumsq_img"]
      self.nmemb += d["nmemb"]
      print "Read %d images from %s" % (d["nmemb"], path)
      #print "Si foil length: %s" %(d["sifoil"])

    print "Number of images used: %i" %self.nmemb
    assert self.nmemb > 0
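
Keeping sum_img and sumsq_img is enough to recover per-pixel mean and standard deviation afterwards via var = E[x^2] - E[x]^2. A small numeric sketch of that closing step (plain Python, illustrative values):

import math

sum_x, sumsq_x, n = 30.0, 110.0, 10   # accumulated over n images
mean = sum_x / n                      # 3.0
var = sumsq_x / n - mean * mean       # 11.0 - 9.0 = 2.0
print("mean=%.2f stddev=%.2f" % (mean, math.sqrt(var)))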
Example #10
  def analyze_prior_results(self, analysis_source):
    """ Runs analysis of previous grid search / integration results, used in an
        analyze-only mode """

    from iota.components.iota_analysis import Analyzer
    from libtbx import easy_pickle as ep

    if os.path.isdir(analysis_source):
      int_folder = os.path.abspath(analysis_source)
    else:
      try:
        int_folder = os.path.abspath(os.path.join(os.curdir,
                     'integration/{}/image_objects'.format(analysis_source)))
      except ValueError:
        print 'Run #{} not found'.format(analysis_source)
        return

    if os.path.isdir(int_folder):
      int_list = [os.path.join(int_folder, i) for i in os.listdir(int_folder)]
      img_objects = [ep.load(i) for i in int_list if i.endswith('.int')]
      self.logfile = os.path.abspath(os.path.join(int_folder, 'iota.log'))

      analysis = Analyzer(self, img_objects, self.iver)
      analysis.print_results()
      analysis.unit_cell_analysis(write_files=False)
      analysis.print_summary(write_files=False)
    else:
      print 'No results found in {}'.format(int_folder)
Example #11
def OnChangeSymmetry (event) :
  from mmtbx.scaling.xtriage import change_symmetry
  button = event.GetEventObject()
  data_file = button.data_file
  assert (data_file is not None) and os.path.isfile(data_file)
  item = button.symm_table.GetFirstSelected()
  if (item == -1) :
    raise Sorry("Please select a symmetry setting first!")
  default_file = os.path.join(os.path.dirname(data_file), "reindexed.mtz")
  file_name = wxtbx.path_dialogs.manager().select_file(
    parent=button,
    message="Output data file in new symmetry",
    style=wx.SAVE,
    wildcard="MTZ files (*.mtz)|*.mtz",
    current_file=default_file)
  data = easy_pickle.load(data_file)
  space_group_symbol = str(button.symm_table.GetItemText(item))
  change_symmetry(
    miller_array=data,
    space_group_symbol=space_group_symbol,
    file_name=str(file_name),
    log=sys.stdout)
  assert os.path.isfile(file_name)
  wx.MessageBox(("The data (as amplitudes) have been saved to %s "+
      "with the selected symmetry setting.") % file_name)
Example #12
def get_rotarama_data (residue_type=None, pos_type=None, db="rama",
    convert_to_numpy_array=False) :
  from mmtbx.rotamer import ramachandran_eval
  from mmtbx.rotamer.rotamer_eval import find_rotarama_data_dir
  # backwards compatibility
  if (pos_type == "proline") : pos_type = "trans-proline"
  if (pos_type == "prepro") : pos_type = "pre-proline"
  assert (pos_type in ["general", "cis-proline", "trans-proline", "glycine",
    "isoleucine or valine", "pre-proline",None])
  assert (db in ["rama", "rota"])
  assert (residue_type is not None) or (pos_type is not None)
  if pos_type is not None :
    residue_type = ramachandran_eval.aminoAcids_8000[pos_type]
  if residue_type.lower() in ["phe", "tyr"] :
    residue_type = "phetyr"
  assert (residue_type is not None)
  rama_data_dir = find_rotarama_data_dir()
  if (db == "rama") :
    pkl_file = "%s8000-%s.pickle" % (db, residue_type)
  else :
    pkl_file = "%s8000-%s.pickle" % (db, residue_type.lower())
  ndt = easy_pickle.load(os.path.join(rama_data_dir, pkl_file))
  if convert_to_numpy_array :
    if (db == "rama") :
      return export_ramachandran_distribution(ndt)
    else :
      return export_rotamer_distribution(ndt)
  else :
    return ndt
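
A hedged usage sketch for the lookup above; the keyword values come from the asserted lists in the function, and the surrounding mmtbx environment is assumed:

# Ramachandran table for the general case, exported as numpy arrays
rama = get_rotarama_data(pos_type="general", db="rama",
                         convert_to_numpy_array=True)
# Raw rotamer table for arginine (residue_type is lowercased for "rota")
rota = get_rotarama_data(residue_type="arg", db="rota")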
Example #13
 def __init__(self, params):
   self.src = psana.Source(params.spectra_filter.detector_address)
   self.roi = params.spectra_filter.roi
   self.bg_roi = params.spectra_filter.background_roi
   self.dark_pickle = easy_pickle.load(params.spectra_filter.background_path)['DATA'].as_numpy_array()
   self.peak_range = params.spectra_filter.peak_range
   self.params = params
Example #14
def run(args):
  assert len(args) == 3
  d1 = easy_pickle.load(args[0])
  d2 = easy_pickle.load(args[1])

  image_1 = d1["DATA"]
  image_2 = d2["DATA"]

  assert image_1.all() == image_2.all()
  diff_image = image_1 - image_2
  d = cspad_tbx.dpack(
    data=diff_image,
    timestamp=cspad_tbx.evt_timestamp(),
    distance=1,
  )
  easy_pickle.dump(args[2], d)
Example #15
 def __init__(self):
   main_aaTables = RamachandranEval.aaTables
   self.aaTables = {}
   for aa,ndt_weakref in main_aaTables.items():
     # convert existing weak references to strong references
     self.aaTables[aa] = ndt_weakref()
   rama_data_dir = find_rotarama_data_dir()
   target_db = open_rotarama_dlite(rotarama_data_dir=rama_data_dir)
   no_update = os.path.exists(os.path.join(rama_data_dir, "NO_UPDATE"))
   for aa, aafile in aminoAcids_8000.items():
     if (self.aaTables.get(aa) is not None): continue
     data_file = "rama8000-"+aafile+".data"
     pickle_file = "rama8000-"+aafile+".pickle"
     pair_info = target_db.pair_info(
       source_path=data_file,
       target_path=pickle_file,
       path_prefix=rama_data_dir)
     if (((pair_info.needs_update) and (not no_update)) or not
         os.path.exists(os.path.join(rama_data_dir, pickle_file))):
       raise Sorry(
         "chem_data/rotarama_data/*.pickle files are missing or out of date.\n"
         "  Please run\n"
         "    mmtbx.rebuild_rotarama_cache\n"
         "  to resolve this problem.\n")
     ndt = easy_pickle.load(file_name=os.path.join(
       rama_data_dir, pair_info.target.path))
     self.aaTables[aa] = ndt
     main_aaTables[aa] = weakref.ref(ndt)
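
The class-level cache above holds weakref.ref objects so unused tables can be garbage-collected, while each instance keeps strong references to the tables it actually uses. A minimal standalone sketch of that caching pattern (illustrative names):

import weakref

class _Table(object):
  def __init__(self, name):
    self.name = name

_cache = {}  # name -> weakref.ref to a _Table

def get_table(name):
  ref = _cache.get(name)
  table = ref() if ref is not None else None
  if table is None:                  # never cached, or already collected
    table = _Table(name)
    _cache[name] = weakref.ref(table)
  return table                       # caller holds the strong reference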
Example #16
def get_mean_statistic_for_resolution (d_min, stat_type, range=0.2, out=None) :
  if (out is None) :
    out = sys.stdout
  from scitbx.array_family import flex
  pkl_file = libtbx.env.find_in_repositories(
    relative_path = "chem_data/polygon_data/all_mvd.pickle",
    test = os.path.isfile)
  db = easy_pickle.load(pkl_file)
  all_d_min = db['high_resolution']
  stat_values = db[stat_type]
  values_for_range = flex.double()
  for (d_, v_) in zip(all_d_min, stat_values) :
    try :
      d = float(d_)
      v = float(v_)
    except ValueError : continue
    else :
      if (d > (d_min - range)) and (d < (d_min + range)) :
        values_for_range.append(v)
  h = flex.histogram(values_for_range, n_slots=10)
  print >> out, "  %s for d_min = %.3f - %.3f A" % (stat_names[stat_type], d_min-range,
    d_min+range)
  min = flex.min(values_for_range)
  max = flex.max(values_for_range)
  mean = flex.mean(values_for_range)
  print >> out, "    count: %d" % values_for_range.size()
  print >> out, "    min: %.2f" % min
  print >> out, "    max: %.2f" % max
  print >> out, "    mean: %.2f" % mean
  print >> out, "    histogram of values:"
  h.show(prefix="      ")
  return mean
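
A hedged call sketch (assumes stat_type is a key of the polygon database and of stat_names, e.g. "r_work" as used in the other examples here):

mean_r = get_mean_statistic_for_resolution(d_min=2.0, stat_type="r_work",
                                           range=0.2)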
Example #17
  def make_int_object_list(self):
    """ Generates list of image objects from previous grid search """
    from libtbx import easy_pickle as ep

    if self.params.cctbx.selection.select_only.grid_search_path is None:
      int_dir = misc.set_base_dir('integration', True)
    else:
      int_dir = self.params.cctbx.selection.select_only.grid_search_path

    img_objects = []

    cmd.Command.start("Importing saved grid search results")
    for root, dirs, files in os.walk(int_dir):
      for filename in files:
        found_file = os.path.join(root, filename)
        if found_file.endswith('.int'):
          obj = ep.load(found_file)
          img_objects.append(obj)
    cmd.Command.end("Importing saved grid search results -- DONE")

    # Pick a randomized subset of images
    if self.params.advanced.random_sample.flag_on and \
       self.params.advanced.random_sample.number < len(img_objects):
      gs_img_objects = self.select_random_subset(img_objects)
    else:
      gs_img_objects = img_objects

    return gs_img_objects
Example #18
def run(args):
  if len(args)==0: print Usage; exit()
  processed = iotbx.phil.process_command_line(
    args=args, master_string=master_phil_str)
  args = processed.remaining_args
  work_params = processed.work.extract().xes
  processed.work.show()
  assert len(args) == 1
  output_dirname = work_params.output_dirname
  roi = cspad_tbx.getOptROI(work_params.roi)
  bg_roi = cspad_tbx.getOptROI(work_params.bg_roi)
  gain_map_path = work_params.gain_map
  estimated_gain = work_params.estimated_gain

  print output_dirname
  if output_dirname is None:
    output_dirname = os.path.join(os.path.dirname(args[0]), "finalise")
    print output_dirname
  hist_d = easy_pickle.load(args[0])
  if len(hist_d.keys())==2:
    hist_d = hist_d['histogram']
  pixel_histograms = faster_methods_for_pixel_histograms(
    hist_d, work_params)

  result = xes_from_histograms(
    pixel_histograms, output_dirname=output_dirname,
    gain_map_path=gain_map_path, estimated_gain=estimated_gain,
    roi=roi, run=work_params.run)
Example #19
  def __init__(self, scratch_dir, pickle_pattern=None):
    pickle_dirname = "pickle"
    pickle_basename = "pkl_"
    self.nmemb = 0
    self.histogram = None
    if pickle_pattern is not None:
      path_pattern = "%s/%s/%s" %(scratch_dir, pickle_dirname, pickle_pattern)
    else:
      path_pattern = "%s/%s/%ss[0-9][0-9]-*.pickle" %(
        scratch_dir, pickle_dirname, pickle_basename)
    print path_pattern
    g = glob.glob(path_pattern)
    if len(g) == 0:
      print "No matches found for pattern: %s" %path_pattern
      return
    for path in g:
      try:
        d = easy_pickle.load(file_name=path)
      except EOFError:
        print "EOFError: skipping %s:" %path
        continue
      if len(d["histogram"].keys()) == 0: continue
      if self.histogram is None:
        self.histogram = d["histogram"]
      else:
        self.histogram = update_histograms(self.histogram, d["histogram"])
      self.nmemb += d["nmemb"]
      print "Read %d images from %s" % (d["nmemb"], path)

    print "Number of images used: %i" %self.nmemb
Example #20
  def __init__(self,address,
               peak_one_position_min,
               peak_one_position_max,
               peak_two_position_min,
               peak_two_position_max,
               peak_one_width,
               peak_two_width,
               peak_ratio = 0.4,
               normalized_peak_to_noise_ratio = 0.4,
               spectrometer_dark_path = None,
               iron_edge_position = None):
    """The mod_spectrum_filter class constructor stores the parameters passed
    from the psana configuration file in instance variables.

    @param address Full data source address of the FEE device
    @param peak_one_position_min the minimum x coordinate in pixel units
     of the first peak position on the detector
    @param peak_one_position_max the maximum x coordinate in pixel units
     of the first peak position on the detector
    @param peak_two_position_min the minimum x coordinate in pixel units
     of the second peak position on the detector
    @param peak_two_position_max the maximum x coordinate in pixel units
     of the second peak position on the detector
    @param peak_one_width the width in pixels of the first peak
    @param peak_two_width the width in pixels of the second peak
    @param peak_ratio the ratio of the two peak heights
    @param normalized_peak_to_noise_ratio ratio of the normalized integrated
      peak to the normalized integrated regions between the peaks
    @param spectrometer_dark_path path to a pickle file of the FEE dark;
      if None, no dark is subtracted from the spectrum
    @param iron_edge_position the position in pixels of the iron edge if an
      absorbing iron foil is used in the experiment; if None, this is not
      used as a filtering parameter
    """
    #self.logger = logging.getLogger(self.__class__.__name__)
    #self.logger.setLevel(logging.INFO)
    #self.logging = logging

    self.src = Source('%s'%address)
    if spectrometer_dark_path is not None:
      self.dark = easy_pickle.load(spectrometer_dark_path)
    else:
      self.dark = None
    self.peak_one_position_min = int(peak_one_position_min)
    self.peak_one_position_max = int(peak_one_position_max)
    self.peak_two_position_min = int(peak_two_position_min)
    self.peak_two_position_max = int(peak_two_position_max)
    self.peak_one_width = int(peak_one_width)
    self.peak_two_width = int(peak_two_width)
    self.normalized_peak_to_noise_ratio = float(normalized_peak_to_noise_ratio)
    self.peak_ratio = float(peak_ratio)
    if iron_edge_position is not None:
      self.iron_edge_position = int(iron_edge_position)
    else:
      self.iron_edge_position = None

    self.ntwo_color = 0
    self.nnodata = 0
    self.nshots = 0
    self.naccepted = 0
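
A hedged instantiation sketch following the documented parameters; the address string and pixel positions below are placeholders, not values from the source:

spec_filter = mod_spectrum_filter(
  address='FeeHxSpectrometer.0:Opal1000.0',  # placeholder FEE address
  peak_one_position_min=100, peak_one_position_max=150,
  peak_two_position_min=400, peak_two_position_max=450,
  peak_one_width=30, peak_two_width=30,
  peak_ratio=0.4,
  normalized_peak_to_noise_ratio=0.4,
  spectrometer_dark_path=None,               # no dark subtraction
  iron_edge_position=None)                   # no iron-foil filtering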
Example #21
def run(args):
  for file_name in args:
    structures = easy_pickle.load(file_name)
    if (isinstance(structures, xray.structure)):
      structures = [structures]
    for structure in structures:
      structure.show_summary().show_scatterers()
      print
Example #22
def load_db (file_name=None) :
  if(file_name is None):
    file_name = libtbx.env.find_in_repositories(
      relative_path = "chem_data/polygon_data/all_mvd.pickle",
      test = os.path.isfile)
  assert os.path.isfile(file_name)
  database_dict = easy_pickle.load(file_name)
  return database_dict
Example #23
def run(files, gain, prefix):
  from libtbx import easy_pickle
  for file in files:
    f = easy_pickle.load(file)
    old_miller = f['observations'][0]
    new_miller = old_miller.customized_copy(sigmas=gain * old_miller.sigmas())
    f['observations'][0] = new_miller
    easy_pickle.dump(prefix + file, f)
Example #24
def unpickle_miller_arrays(file_name):
  result = easy_pickle.load(file_name)
  if (isinstance(result, miller.array)):
    return [result]
  result = list(result)
  for miller_array in result:
    if (not isinstance(miller_array, miller.array)):
      return None
  return result
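
The helper normalizes its result: a single miller.array becomes a one-element list, a sequence of miller arrays is returned as a list, and anything else yields None. A hedged usage sketch ("data.pickle" is a placeholder):

from libtbx.utils import Sorry

arrays = unpickle_miller_arrays("data.pickle")
if arrays is None:
  raise Sorry("Not a pickled miller.array (or list of miller arrays).")
for miller_array in arrays:
  miller_array.show_summary()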
Example #25
def run(args):
  processed = iotbx.phil.process_command_line(
    args=args, master_string=master_phil_str)
  args = processed.remaining_args
  work_params = processed.work.extract().xes
  processed.work.show()
  assert len(args) == 1
  output_dirname = work_params.output_dirname
  roi = cspad_tbx.getOptROI(work_params.roi)
  bg_roi = cspad_tbx.getOptROI(work_params.bg_roi)
  gain_map_path = work_params.gain_map
  estimated_gain = work_params.estimated_gain
  nproc = work_params.nproc
  photon_threshold = work_params.photon_threshold
  method = work_params.method
  print output_dirname
  if output_dirname is None:
    output_dirname = os.path.join(os.path.dirname(args[0]), "finalise")
    print output_dirname
  hist_d = easy_pickle.load(args[0])
  if len(hist_d.keys())==2:
    hist_d = hist_d['histogram']
  pixel_histograms = view_pixel_histograms.pixel_histograms(
    hist_d, estimated_gain=estimated_gain)
  result = xes_from_histograms(
    pixel_histograms, output_dirname=output_dirname,
    gain_map_path=gain_map_path, estimated_gain=estimated_gain,
    method=method, nproc=nproc,
    photon_threshold=photon_threshold, roi=roi, run=work_params.run)

  if bg_roi is not None:
    bg_outdir = os.path.normpath(output_dirname)+"_bg"
    bg_result = xes_from_histograms(
      pixel_histograms, output_dirname=bg_outdir,
      gain_map_path=gain_map_path, estimated_gain=estimated_gain,
      method=method, nproc=nproc,
      photon_threshold=photon_threshold, roi=bg_roi)

    from xfel.command_line.subtract_background import subtract_background
    signal = result.spectrum
    background = bg_result.spectrum
    signal = (signal[0].as_double(), signal[1])
    background = (background[0].as_double(), background[1])
    signal_x, background_subtracted = subtract_background(signal, background, plot=True)
    f = open(os.path.join(output_dirname, "background_subtracted.txt"), "wb")
    print >> f, "\n".join(["%i %f" %(x, y)
                           for x, y in zip(signal_x, background_subtracted)])
    f.close()

  else:
    from xfel.command_line import smooth_spectrum
    from scitbx.smoothing import savitzky_golay_filter
    x, y = result.spectrum[0].as_double(), result.spectrum[1]
    x, y = smooth_spectrum.interpolate(x, y)
    x, y_smoothed = savitzky_golay_filter(
      x, y, 20, 4)
    smooth_spectrum.estimate_signal_to_noise(x, y, y_smoothed)
Example #26
 def get_image_integration (self, sol_id, image_id=None, file_name=None) :
   assert (image_id is not None) or (file_name is not None)
   if (file_name is None):
     file_name = os.path.join(self.dir_name, "integration_%d_%d.pkl" % (sol_id,
       image_id))
   if (not os.path.exists(file_name)) :
     raise Sorry("Can't find the file %s!" % file_name)
   integ_result = easy_pickle.load(file_name)
   summary = get_integration_summary(integ_result, image_id)
   return integ_result, summary
Example #27
 def __init__(self, file_name, file_name_during_write=None):
   self.file_name = file_name
   if (file_name_during_write is None and self.file_name is not None):
     self.file_name_during_write = self.file_name + ".new"
   else:
     self.file_name_during_write = file_name_during_write
   if (self.file_name is None
       or not os.path.exists(self.file_name)):
     self.pair_infos = {}
   else:
     self.pair_infos = easy_pickle.load(file_name=self.file_name)
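
The file_name_during_write convention supports crash-safe saves: write the new state to "<name>.new", then rename it over the original so readers never see a half-written file. A minimal sketch of that idea with plain pickle (illustrative, not the dlite implementation):

import os, pickle

def safe_dump(obj, file_name):
  tmp = file_name + ".new"
  with open(tmp, "wb") as f:
    pickle.dump(obj, f)
  os.rename(tmp, file_name)  # atomic replace on POSIX filesystems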
Example #28
  def plot_runtime_results(self):
    ''' Plot results for each cycle upon cycle completion '''

    stat_file = os.path.join(self.pparams.run_no, 'pickle.stat')
    if os.path.isfile(stat_file):
      info = ep.load(stat_file)
    else:
      info = {}
    total_cycles = self.pparams.n_postref_cycle
    if 'binned_resolution' in info:
      self.graph_tab.draw_plots(info, total_cycles)
Example #29
  def readHeader(self):
    # XXX The functionality provided by this function has largely been
    # replicated in
    # rstbx.slip_viewer.tile_generation._get_flex_image_multitile().
    # However, several code paths still depend on the member variables
    # created here.

    from xfel.cftbx.detector.metrology import metrology_as_transformation_matrices

    d = easy_pickle.load(self.filename)

    # Derive the transformation matrices from the metrology in the
    # image.
    self._metrology_params = d["METROLOGY"].extract()
    self._matrices = metrology_as_transformation_matrices(
      self._metrology_params)

    self._tiles = d["TILES"]
    self._keylist = self._tiles.keys() #for later use by get_pixel_intensity()

    # Assert that all ASICs are the same size, and that there is a
    # transformation matrix for each ASIC.
    for (key, asic) in self._tiles.iteritems():
      if not hasattr(self, "_asic_focus"):
        self._asic_focus = asic.focus()
      else:
        assert asic.focus() == self._asic_focus
      assert key in self._matrices

    # Assert that all specified pixel sizes and saturated values are
    # equal and not None.
    for p in self._metrology_params.detector.panel:
      for s in p.sensor:
        for a in s.asic:
          if not hasattr(self, "_pixel_size"):
            self._pixel_size = a.pixel_size
          else:
            assert self._pixel_size == a.pixel_size

          if not hasattr(self, "_saturation"):
            self._saturation = a.saturation
          else:
            # XXX real-valued equality!  See
            # cctbx_project/scitbx/math/approx_equal.h
            assert self._saturation == a.saturation
    assert hasattr(self, "_pixel_size") and self._pixel_size is not None
    assert hasattr(self, "_saturation") and self._saturation is not None

    # Determine next multiple of eight.  Set size1 and size2 to the
    # focus of the padded rawdata.
    self._asic_padded = (8 * int(math.ceil(self._asic_focus[0] / 8.0)),
                         8 * int(math.ceil(self._asic_focus[1] / 8.0)))
    self.size1 = len(self._tiles) * self._asic_padded[0]
    self.size2 = self._asic_padded[1]
Example #30
def fetch_fingerprints(motif_list):
  fingerprint_list = []
  for motif in motif_list:
    path = "cctbx_project/mmtbx/cablam/fingerprints/"+motif+".pickle"
    picklefile = libtbx.env.find_in_repositories(
      relative_path=path, test=os.path.isfile)
    if picklefile is None:
      raise Sorry("\nCould not find a needed pickle file for motif "+motif+" in chem_data.\nExiting.\n")
    else:
      fingerprint_list.append(easy_pickle.load(file_name=picklefile))
  return fingerprint_list
Example #31
            print picklepath
            destpath_l = os.path.join(
                destroot_l,
                os.path.splitext(picklename)[0] + "_l.pickle")
            destpath_r = os.path.join(
                destroot_r,
                os.path.splitext(picklename)[0] + "_r.pickle")
            destpath_m = os.path.join(
                destroot_m,
                os.path.splitext(picklename)[0] + "_m.pickle")
            destpath_n = os.path.join(
                destroot_n,
                os.path.splitext(picklename)[0] + "_n.pickle")
            #if os.path.exists(destpath_l): continue
            try:
                data = easy_pickle.load(picklepath)
            except Exception as e:
                print "Pickle failed to load", picklepath
                continue
            if not "fuller_kapton_absorption_correction" in data:
                continue

            corr = data["fuller_kapton_absorption_correction"]

            from xfel.cxi.cspad_ana.rayonix_tbx import get_rayonix_pixel_size
            from scitbx.array_family import flex
            pixel_size = get_rayonix_pixel_size(2)
            bx = data['xbeam'] / pixel_size
            by = data['ybeam'] / pixel_size

            preds = data['mapped_predictions']
Example #32
def run(args):
    processed = iotbx.phil.process_command_line(args=args,
                                                master_string=master_phil_str)
    args = processed.remaining_args
    work_params = processed.work.extract().xes
    processed.work.show()
    assert len(args) == 1
    output_dirname = work_params.output_dirname
    roi = cspad_tbx.getOptROI(work_params.roi)
    bg_roi = cspad_tbx.getOptROI(work_params.bg_roi)
    gain_map_path = work_params.gain_map
    estimated_gain = work_params.estimated_gain
    nproc = work_params.nproc
    photon_threshold = work_params.photon_threshold
    method = work_params.method
    print(output_dirname)
    if output_dirname is None:
        output_dirname = os.path.join(os.path.dirname(args[0]), "finalise")
        print(output_dirname)
    hist_d = easy_pickle.load(args[0])
    if len(hist_d) == 2:
        hist_d = hist_d['histogram']
    pixel_histograms = view_pixel_histograms.pixel_histograms(
        hist_d, estimated_gain=estimated_gain)
    result = xes_from_histograms(pixel_histograms,
                                 output_dirname=output_dirname,
                                 gain_map_path=gain_map_path,
                                 estimated_gain=estimated_gain,
                                 method=method,
                                 nproc=nproc,
                                 photon_threshold=photon_threshold,
                                 roi=roi,
                                 run=work_params.run)

    if bg_roi is not None:
        bg_outdir = os.path.normpath(output_dirname) + "_bg"
        bg_result = xes_from_histograms(pixel_histograms,
                                        output_dirname=bg_outdir,
                                        gain_map_path=gain_map_path,
                                        estimated_gain=estimated_gain,
                                        method=method,
                                        nproc=nproc,
                                        photon_threshold=photon_threshold,
                                        roi=bg_roi)

        from xfel.command_line.subtract_background import subtract_background
        signal = result.spectrum
        background = bg_result.spectrum
        signal = (signal[0].as_double(), signal[1])
        background = (background[0].as_double(), background[1])
        signal_x, background_subtracted = subtract_background(signal,
                                                              background,
                                                              plot=True)
        f = open(os.path.join(output_dirname, "background_subtracted.txt"),
                 "w")
        print("\n".join([
            "%i %f" % (x, y) for x, y in zip(signal_x, background_subtracted)
        ]),
              file=f)
        f.close()

    else:
        from xfel.command_line import smooth_spectrum
        from scitbx.smoothing import savitzky_golay_filter
        x, y = result.spectrum[0].as_double(), result.spectrum[1]
        x, y = smooth_spectrum.interpolate(x, y)
        x, y_smoothed = savitzky_golay_filter(x, y, 20, 4)
        smooth_spectrum.estimate_signal_to_noise(x, y, y_smoothed)
Example #33
def run (args, imageset = None):
  from xfel import radial_average
  from scitbx.array_family import flex
  import os, sys
  from dxtbx.datablock import DataBlockFactory

  # Parse input
  try:
    n = len(args)
  except Exception:
    params = args
  else:
    user_phil = []
    for arg in args:
      if (not "=" in arg):
        try :
          user_phil.append(libtbx.phil.parse("""file_path=%s""" % arg))
        except ValueError:
          raise Sorry("Unrecognized argument '%s'" % arg)
      else:
        try:
          user_phil.append(libtbx.phil.parse(arg))
        except RuntimeError as e:
          raise Sorry("Unrecognized argument '%s' (error: %s)" % (arg, str(e)))
    params = master_phil.fetch(sources=user_phil).extract()
  if imageset is None:
    if params.file_path is None or len(params.file_path) == 0 or not all([os.path.isfile(f) for f in params.file_path]):
      master_phil.show()
      raise Usage("file_path must be defined (either file_path=XXX, or the path alone).")
  assert params.n_bins is not None
  assert params.verbose is not None
  assert params.output_bins is not None

  # Allow writing to a file instead of stdout
  if params.output_file is None:
    logger = sys.stdout
  else:
    logger = open(params.output_file, 'w')
    logger.write("%s "%params.output_file)

  if params.show_plots:
    from matplotlib import pyplot as plt
    import numpy as np
    colormap = plt.cm.gist_ncar
    plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0, 0.9, len(params.file_path))])

  if params.mask is not None:
    params.mask = easy_pickle.load(params.mask)

  if imageset is None:
    iterable = params.file_path
    load_func = lambda x: DataBlockFactory.from_filenames([x])[0].extract_imagesets()[0]
  else:
    iterable = [imageset]
    load_func = lambda x: x

  # Iterate over each file provided
  for item in iterable:
    iset = load_func(item)
    n_images = len(iset)
    if params.image_number is None:
      subiterable = xrange(n_images)
    else:
      subiterable = [params.image_number]
    for image_number in subiterable:
      beam = iset.get_beam(image_number)
      detector = iset.get_detector(image_number)
      s0 = col(beam.get_s0())

      # Search the detector for the panel farthest from the beam. The number of bins in the radial average will be
      # equal to the farthest point from the beam on the detector, in pixels, unless overridden at the command line
      panel_res = [p.get_max_resolution_at_corners(s0) for p in detector]
      farthest_panel = detector[panel_res.index(min(panel_res))]
      size2, size1 = farthest_panel.get_image_size()
      corners = [(0,0), (size1-1,0), (0,size2-1), (size1-1,size2-1)]
      corners_lab = [col(farthest_panel.get_pixel_lab_coord(c)) for c in corners]
      corner_two_thetas = [farthest_panel.get_two_theta_at_pixel(s0, c) for c in corners]
      extent_two_theta = max(corner_two_thetas)
      max_corner = corners_lab[corner_two_thetas.index(extent_two_theta)]
      extent = int(math.ceil(max_corner.length()*math.sin(extent_two_theta)/max(farthest_panel.get_pixel_size())))
      extent_two_theta *= 180/math.pi

      if params.n_bins < extent:
        params.n_bins = extent

      # These arrays will store the radial average info
      sums    = flex.double(params.n_bins) * 0
      sums_sq = flex.double(params.n_bins) * 0
      counts  = flex.int(params.n_bins) * 0

      all_data = iset[image_number]

      if not isinstance(all_data, tuple):
        all_data = (all_data,)

      for tile, (panel, data) in enumerate(zip(detector, all_data)):
        if params.mask is None:
          mask = flex.bool(flex.grid(data.focus()), True)
        else:
          mask = params.mask[tile]

        if hasattr(data,"as_double"):
          data = data.as_double()

        logger.flush()
        if params.verbose:
          logger.write("Average intensity tile %d: %9.3f\n"%(tile, flex.mean(data)))
          logger.write("N bins: %d\n"%params.n_bins)
          logger.flush()

        x1,y1,x2,y2 = 0,0,panel.get_image_size()[1],panel.get_image_size()[0]
        bc = panel.get_beam_centre_px(beam.get_s0())
        bc = int(round(bc[1])), int(round(bc[0]))

        # compute the average
        radial_average(data,mask,bc,sums,sums_sq,counts,panel.get_pixel_size()[0],panel.get_distance(),
                       (x1,y1),(x2,y2))

      # average the results, avoiding division by zero
      results = sums.set_selected(counts <= 0, 0)
      results /= counts.set_selected(counts <= 0, 1).as_double()

      if params.median_filter_size is not None:
        logger.write("WARNING, the median filter is not fully propagated to the variances\n")
        from scipy.ndimage.filters import median_filter
        results = flex.double(median_filter(results.as_numpy_array(), size = params.median_filter_size))

      # calculate standard deviations
      stddev_sel = ((sums_sq-sums*results) >= 0) & (counts > 0)
      std_devs = flex.double(len(sums), 0)
      std_devs.set_selected(stddev_sel,
                           (sums_sq.select(stddev_sel)-sums.select(stddev_sel)* \
                            results.select(stddev_sel))/counts.select(stddev_sel).as_double())
      std_devs = flex.sqrt(std_devs)

      twotheta = flex.double(xrange(len(results)))*extent_two_theta/params.n_bins
      q_vals = 4*math.pi*flex.sin(math.pi*twotheta/360)/beam.get_wavelength()

      if params.low_max_two_theta_limit is None:
        subset = results
      else:
        subset = results.select(twotheta >= params.low_max_two_theta_limit)

      max_result = flex.max(subset)

      if params.x_axis == 'two_theta':
        xvals = twotheta
        max_x = twotheta[flex.first_index(results, max_result)]
      elif params.x_axis == 'q':
        xvals = q_vals
        max_x = q_vals[flex.first_index(results, max_result)]

      for i in xrange(len(results)):
        val = xvals[i]
        if params.output_bins and "%.3f"%results[i] != "nan":
         #logger.write("%9.3f %9.3f\n"%     (val,results[i]))        #.xy  format for Rex.cell.
          logger.write("%9.3f %9.3f %9.3f\n"%(val,results[i],std_devs[i])) #.xye format for GSASII
         #logger.write("%.3f %.3f %.3f\n"%(val,results[i],ds[i]))  # include calculated d spacings
      logger.write("Maximum %s: %f, value: %f\n"%(params.x_axis, max_x, max_result))

      if params.show_plots:
        if params.plot_x_max is not None:
          results = results.select(xvals <= params.plot_x_max)
          xvals = xvals.select(xvals <= params.plot_x_max)
        if params.normalize:
          plt.plot(xvals.as_numpy_array(),(results/flex.max(results)).as_numpy_array(),'-')
        else:
          plt.plot(xvals.as_numpy_array(),results.as_numpy_array(),'-')
        if params.x_axis == 'two_theta':
          plt.xlabel("2 theta")
        elif params.x_axis == 'q':
          plt.xlabel("q")
        plt.ylabel("Avg ADUs")
        if params.plot_y_max is not None:
          plt.ylim(0, params.plot_y_max)

  if params.show_plots:
    #plt.legend([os.path.basename(os.path.splitext(f)[0]) for f in params.file_path], ncol=2)
    plt.show()

  return xvals, results
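
The q_vals line above applies the standard conversion q = 4*pi*sin(theta)/lambda with 2-theta held in degrees. A standalone numeric check of the same formula (the wavelength is illustrative):

import math

def two_theta_deg_to_q(two_theta, wavelength):
  # q = 4*pi*sin(theta)/lambda, with two_theta given in degrees
  return 4 * math.pi * math.sin(math.radians(two_theta / 2.0)) / wavelength

print("q = %.4f" % two_theta_deg_to_q(30.0, 1.0))  # ~3.2526 for lambda = 1.0 A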
Example #34
    def get_results(self, finished_objects=None):
        if not finished_objects:
            finished_objects = self.info.get_finished_objects()
            if not finished_objects:
                return False
        final_objects = []

        self.info.unplotted_stats = {}
        for key in self.info.stats:
            self.info.unplotted_stats[key] = dict(lst=[])

        for obj in finished_objects:
            item = [obj.input_index, obj.img_path, obj.img_index]
            if len(self.info.unprocessed) > 0 and item in self.info.unprocessed:
                self.info.unprocessed.remove(item)
            if (
                len(self.info.categories["not_processed"][0]) > 0
                and item in self.info.categories["not_processed"][0]
            ):
                self.info.categories["not_processed"][0].remove(item)

            if obj.fail:
                key = obj.fail.replace(" ", "_")
                if key in self.info.categories:
                    self.info.categories[key][0].append(item)
            else:
                self.info.categories["integrated"][0].append(obj.final["final"])
                self.info.final_objects.append(obj.obj_file)
                final_objects.append(obj)

            if not obj.fail or "triage" not in obj.fail:
                self.info.categories["have_diffraction"][0].append(obj.img_path)

        # Calculate processing stats from final objects
        if final_objects:
            self.info.pixel_size = final_objects[0].final["pixel_size"]

            # Get observations from file
            try:
                all_obs = ep.load(self.info.idx_file)
            except Exception:
                all_obs = None

            # Collect image processing stats
            for obj in final_objects:
                for key in self.info.stats:
                    if key in obj.final:
                        stat_tuple = (
                            obj.input_index,
                            obj.img_path,
                            obj.img_index,
                            obj.final[key],
                        )
                        self.info.stats[key]["lst"].append(stat_tuple)

                        # add proc filepath info to 'pointers'
                        pointer_dict = {
                            "img_file": obj.img_path,
                            "obj_file": obj.obj_file,
                            "img_index": obj.img_index,
                            "experiments": obj.eint_path,
                            "reflections": obj.rint_path,
                        }
                        self.info.pointers[str(obj.input_index)] = pointer_dict

                        if key not in self.info.unplotted_stats:
                            self.info.unplotted_stats[key] = dict(lst=[])
                        self.info.unplotted_stats[key]["lst"].append(stat_tuple)

                # Unit cells and space groups (i.e. cluster iterable)
                self.info.cluster_iterable.append(
                    [
                        float(obj.final["a"]),
                        float(obj.final["b"]),
                        float(obj.final["c"]),
                        float(obj.final["alpha"]),
                        float(obj.final["beta"]),
                        float(obj.final["gamma"]),
                        str(obj.final["sg"]),
                    ]
                )

                # Get observations from this image
                obs = None
                if "observations" in obj.final:
                    obs = obj.final["observations"].as_non_anomalous_array()
                else:
                    pickle_path = obj.final["final"]
                    if os.path.isfile(pickle_path):
                        try:
                            pickle = ep.load(pickle_path)
                            obs = pickle["observations"][0].as_non_anomalous_array()
                        except Exception as e:
                            print(
                                "IMAGE_PICKLE_ERROR for {}: {}".format(pickle_path, e)
                            )

                with util.Capturing():
                    if obs:
                        # Append observations to combined miller array
                        obs = obs.expand_to_p1()
                        if all_obs:
                            all_obs = all_obs.concatenate(
                                obs, assert_is_similar_symmetry=False
                            )
                        else:
                            all_obs = obs

                        # Get B-factor from this image
                        try:
                            mxh = mx_handler()
                            asu_contents = mxh.get_asu_contents(500)
                            observations_as_f = obs.as_amplitude_array()
                            observations_as_f.setup_binner(auto_binning=True)
                            wp = statistics.wilson_plot(
                                observations_as_f, asu_contents, e_statistics=True
                            )
                            b_factor = wp.wilson_b
                        except RuntimeError as e:
                            b_factor = 0
                            print("B_FACTOR_ERROR: ", e)
                        self.info.b_factors.append(b_factor)

            # Save collected observations to file
            if all_obs:
                ep.dump(self.info.idx_file, all_obs)

            # Calculate dataset stats
            for k in self.info.stats:
                stat_list = list(zip(*self.info.stats[k]["lst"]))[3]
                stats = dict(
                    lst=self.info.stats[k]["lst"],
                    median=np.median(stat_list).item(),
                    mean=np.mean(stat_list).item(),
                    std=np.std(stat_list).item(),
                    max=np.max(stat_list).item(),
                    min=np.min(stat_list).item(),
                    cons=Counter(stat_list).most_common(1)[0][0],
                )
                self.info.stats[k].update(stats)
            return True
        else:
            return False
Example #35
  def __init__(self, pixel_histograms, output_dirname=".", gain_map_path=None,
               gain_map=None, estimated_gain=30,roi=None,run=None):

    self.sum_img = flex.double(flex.grid(370,391), 0) # XXX define the image size some other way?
    gain_img = flex.double(self.sum_img.accessor(), 0)

    assert [gain_map, gain_map_path].count(None) > 0
    if gain_map_path is not None:
      d = easy_pickle.load(gain_map_path)
      gain_map = d["DATA"]

    mask = flex.int(self.sum_img.accessor(), 0)

    start_row = 370
    end_row = 0
    print(len(pixel_histograms.histograms))

    pixels = list(pixel_histograms.pixels())
    n_pixels = len(pixels)
    if roi is not None:
      for k, (i, j) in enumerate(reversed(pixels)):
        if (   i < roi[2]
            or i > roi[3]
            or j < roi[0]
            or j > roi[1]):
          del pixels[n_pixels-k-1]

    if gain_map is None:
      fixed_func = pixel_histograms.fit_one_histogram
    else:
      def fixed_func(pixel):
        return pixel_histograms.fit_one_histogram(pixel, n_gaussians=1)

    chi_squared_list=flex.double()

    for i, pixel in enumerate(pixels):
      #print i,pixel
      LEG = False
      start_row = min(start_row, pixel[0])
      end_row = max(end_row, pixel[0])
      n_photons = 0

      try:
          if LEG: gaussians, two_photon_flag = pixel_histograms.fit_one_histogram(pixel)
          alt_gaussians = pixel_histograms.fit_one_histogram_two_gaussians(pixel)
      except ZeroDivisionError:
          print("HEY DIVIDE BY ZERO")
          #pixel_histograms.plot_combo(pixel, gaussians)
          mask[pixel] = 1
          continue
      except RuntimeError as e:
          print("Error fitting pixel %s" %str(pixel))
          print(str(e))
          mask[pixel] = 1
          continue

      hist = pixel_histograms.histograms[pixel]

      if not LEG:
        gs = alt_gaussians[1].params
        fit_photons = gs[0] * gs[2] * math.sqrt(2.*math.pi)
        n_photons = int(round(fit_photons,0))
        fit_interpretation=pixel_histograms.multiphoton_and_fit_residual(
                     pixel_histograms.histograms[pixel], alt_gaussians)
        multi_photons = fit_interpretation.get_multiphoton_count()
        total_photons = n_photons + multi_photons

        if False and n_photons< 0: # Generally, do not mask negative values; if fit is still OK
          print("\n%d pixel %s altrn %d photons from curvefitting"%( i,pixel,n_photons ))
          pixel_histograms.plot_combo(pixel, alt_gaussians,
                                      interpretation=fit_interpretation)
          mask[pixel]=1 # do not mask out negative pixels if the Gaussian fit is good
          continue

        chi_squared_list.append(fit_interpretation.chi_squared())
        suspect = False # don't know the optimal statistical test.  Histograms vary primarily by total count & # photons
        if total_photons <= 3:
          if fit_interpretation.chi_squared() > 2.5 or fit_interpretation.quality_factor < 5: suspect=True
        elif 3 < total_photons <= 10:
          if fit_interpretation.chi_squared() > 5 or fit_interpretation.quality_factor < 10: suspect=True
        elif 10 < total_photons <= 33:
          if fit_interpretation.chi_squared() > 10 or fit_interpretation.quality_factor < 20: suspect=True
        elif 33 < total_photons <= 100:
          if fit_interpretation.chi_squared() > 20 or fit_interpretation.quality_factor < 20: suspect=True
        elif 100 < total_photons <= 330:
          if fit_interpretation.chi_squared() > 30 or fit_interpretation.quality_factor < 25: suspect=True
        elif 330 < total_photons <= 1000:
          if fit_interpretation.chi_squared() > 40 or fit_interpretation.quality_factor < 30: suspect=True
        elif 1000 < total_photons:
          if fit_interpretation.chi_squared() > 50 or fit_interpretation.quality_factor < 30: suspect=True

        if suspect:
          print("\n%d pixel %s Bad quality 0/1-photon fit"%(i,pixel),fit_interpretation.quality_factor)
          print("   with chi-squared %10.5f"%fit_interpretation.chi_squared())
          print("   Suspect",suspect)
          print("%d fit photons, %d total photons"%(n_photons,total_photons))
          #pixel_histograms.plot_combo(pixel, alt_gaussians,
          #                            interpretation=fit_interpretation)
          mask[pixel]=1
          continue

        self.sum_img[pixel] = n_photons + multi_photons

    mask.set_selected(self.sum_img == 0, 1)
    unbound_pixel_mask = xes_finalise.cspad_unbound_pixel_mask()
    mask.set_selected(unbound_pixel_mask > 0, 1)
    bad_pixel_mask = xes_finalise.cspad2x2_bad_pixel_mask_cxi_run7()
    mask.set_selected(bad_pixel_mask > 0, 1)

    for row in range(self.sum_img.all()[0]):
      self.sum_img[row:row+1,:].count(0)

    spectrum_focus = self.sum_img[start_row:end_row,:]
    mask_focus = mask[start_row:end_row,:]

    spectrum_focus.set_selected(mask_focus > 0, 0)

    xes_finalise.filter_outlying_pixels(spectrum_focus, mask_focus)

    print("Number of rows: %i" %spectrum_focus.all()[0])
    print("Estimated no. photons counted: %i" %flex.sum(spectrum_focus))
    print("Number of images used: %i" %flex.sum(
      list(pixel_histograms.histograms.values())[0].slots()))

    d = cspad_tbx.dpack(
      address='CxiSc1-0|Cspad2x2-0',
      data=spectrum_focus,
      distance=1,
      ccd_image_saturation=2e8, # XXX
    )
    if run is not None: runstr="_%04d"%run
    else: runstr=""
    cspad_tbx.dwritef(d, output_dirname, 'sum%s_'%runstr)


    plot_x, plot_y = xes_finalise.output_spectrum(
      spectrum_focus.iround(), mask_focus=mask_focus,
      output_dirname=output_dirname, run=run)
    self.spectrum = (plot_x, plot_y)
    self.spectrum_focus = spectrum_focus
    xes_finalise.output_matlab_form(spectrum_focus, "%s/sum%s.m" %(output_dirname,runstr))
    print(output_dirname)
    print("Average chi squared is",flex.mean(chi_squared_list),"on %d shots"%flex.sum(hist.slots()))
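
The fit_photons line above uses the area of a Gaussian, A = height * sigma * sqrt(2*pi), to turn the fitted one-photon peak into a photon count. A standalone numeric sketch (values illustrative):

import math

height, sigma = 25.0, 3.0   # fitted one-photon peak parameters
n_photons = int(round(height * sigma * math.sqrt(2 * math.pi)))
print("photons from fit: %d" % n_photons)  # 188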
Example #36
def refine_expanding(params, merged_scope, combine_phil):
    assert params.start_at_hierarchy_level == 0
    if params.rmsd_filter.enable:
        input_name = "filtered"
        command = "cctbx.xfel.filter_experiments_by_rmsd %s %s output.filtered_experiments=%s output.filtered_reflections=%s"
        command = command % ("%s_combined_experiments.json" % params.tag,
                             "%s_combined_reflections.pickle" % params.tag,
                             "%s_filtered_experiments.json" % params.tag,
                             "%s_filtered_reflections.pickle" % params.tag)
        command += " iqr_multiplier=%f" % params.rmsd_filter.iqr_multiplier
        print command
        result = easy_run.fully_buffered(command=command).raise_if_errors()
        result.show_stdout()
    else:
        input_name = "combined"

    # this is the order to refine the CSPAD in
    steps = {}
    steps[0] = [2, 3]
    steps[1] = steps[0] + [0, 1]
    steps[2] = steps[1] + [14, 15]
    steps[3] = steps[2] + [6, 7]
    steps[4] = steps[3] + [4, 5]
    steps[5] = steps[4] + [12, 13]
    steps[6] = steps[5] + [8, 9]
    steps[7] = steps[6] + [10, 11]

    for s, panels in steps.iteritems():
        rest = []
        for p in panels:
            rest.append(p + 16)
            rest.append(p + 32)
            rest.append(p + 48)
        panels.extend(rest)

    levels = {0: (0, 1)}  # levels 0 and 1
    for i in range(7):
        levels[i + 1] = (2, )  # level 2

    previous_step_and_level = None
    for j in range(8):
        from libtbx import easy_pickle
        print "Filtering out all reflections except those on panels %s" % (
            ", ".join(["%d" % p for p in steps[j]]))
        combined_path = "%s_%s_reflections.pickle" % (params.tag, input_name)
        output_path = "%s_reflections_step%d.pickle" % (params.tag, j)
        data = easy_pickle.load(combined_path)
        sel = None
        for panel_id in steps[j]:
            if sel is None:
                sel = data['panel'] == panel_id
            else:
                sel |= data['panel'] == panel_id
        print "Retaining", len(
            data.select(sel)), "out of", len(data), "reflections"
        easy_pickle.dump(output_path, data.select(sel))

        for i in levels[j]:
            print "Step", j, "refining at hierarchy level", i
            refine_phil_file = "%s_refine_step%d_level%d.phil" % (params.tag,
                                                                  j, i)
            if i == 0:
                if params.refine_distance:
                    diff_phil = "refinement.parameterisation.detector.fix_list=Tau1"  # fix detector rotz
                else:
                    diff_phil = "refinement.parameterisation.detector.fix_list=Dist,Tau1"  # fix detector rotz, distance
                if params.flat_refinement:
                    diff_phil += ",Tau2,Tau3"  # Also fix x and y rotations
                diff_phil += "\n"
                if params.refine_energy:
                    diff_phil += "refinement.parameterisation.beam.fix=in_spindle_plane+out_spindle_plane\n"  # allow energy to refine
            else:
                # Note, always need to fix something, so pick a panel group and fix its Tau1 (rotation around Z) always
                if params.flat_refinement and params.flat_refinement_with_distance:
                    diff_phil = "refinement.parameterisation.detector.fix_list=Group1Tau1,Tau2,Tau3\n"  # refine distance, rotz and xy translation
                    diff_phil += "refinement.parameterisation.detector.constraints.parameter=Dist\n"  # constrain distance to be refined identically for all panels at this hierarchy level
                elif params.flat_refinement:
                    diff_phil = "refinement.parameterisation.detector.fix_list=Dist,Group1Tau1,Tau2,Tau3\n"  # refine only rotz and xy translation
                else:
                    diff_phil = "refinement.parameterisation.detector.fix_list=Group1Tau1\n"  # refine almost everything

            if previous_step_and_level is None:
                command = "dials.refine %s %s_%s_experiments.json %s_reflections_step%d.pickle"%( \
                  refine_phil_file, params.tag, input_name, params.tag, j)
            else:
                p_step, p_level = previous_step_and_level
                if p_step == j:
                    command = "dials.refine %s %s_refined_experiments_step%d_level%d.json %s_refined_reflections_step%d_level%d.pickle"%( \
                      refine_phil_file, params.tag, p_step, p_level, params.tag, p_step, p_level)
                else:
                    command = "dials.refine %s %s_refined_experiments_step%d_level%d.json %s_reflections_step%d.pickle"%( \
                      refine_phil_file, params.tag, p_step, p_level, params.tag, j)

            diff_phil += "refinement.parameterisation.detector.hierarchy_level=%d\n" % i

            output_experiments = "%s_refined_experiments_step%d_level%d.json" % (
                params.tag, j, i)
            command += " output.experiments=%s output.reflections=%s_refined_reflections_step%d_level%d.pickle"%( \
              output_experiments, params.tag, j, i)

            scope = merged_scope.fetch(parse(diff_phil))
            f = open(refine_phil_file, 'w')
            f.write(refine_scope.fetch_diff(scope).as_str())
            f.close()

            print(command)
            result = easy_run.fully_buffered(command=command).raise_if_errors()
            result.show_stdout()

            # In expanding mode, if using flat refinement with distance, after having refined this step as a block, unrefined
            # panels will have been left behind. Read back the new metrology, compute the shift applied to the panels refined
            # in this step, and apply that shift to the unrefined panels in this step.
            if params.flat_refinement and params.flat_refinement_with_distance and i > 0:
                from dxtbx.model.experiment_list import ExperimentListFactory, ExperimentListDumper
                from xfel.command_line.cspad_detector_congruence import iterate_detector_at_level, iterate_panels
                from scitbx.array_family import flex
                from scitbx.matrix import col
                from libtbx.test_utils import approx_equal
                experiments = ExperimentListFactory.from_json_file(
                    output_experiments, check_format=False)
                assert len(experiments.detectors()) == 1
                detector = experiments.detectors()[0]
                # Displacements: deltas along the vector normal to the detector
                displacements = flex.double()
                # Iterate through the panel groups at this level
                for panel_group in iterate_detector_at_level(
                        detector.hierarchy(), 0, i):
                    # Were there panels refined in this step in this panel group?
                    test = [
                        list(detector).index(panel) in steps[j]
                        for panel in iterate_panels(panel_group)
                    ]
                    if not any(test): continue
                    # Compute the translation along the normal of this panel group.  This is defined as distance in dials.refine
                    displacements.append(
                        col(panel_group.get_local_fast_axis()).cross(
                            col(panel_group.get_local_slow_axis())).dot(
                                col(panel_group.get_local_origin())))

                # Even though the panels are constrained to move the same amount, there is a bit of variation.
                stats = flex.mean_and_variance(displacements)
                displacement = stats.mean()
                print "Average displacement along normals: %f +/- %f" % (
                    stats.mean(), stats.unweighted_sample_standard_deviation())

                # Verify the variation isn't significant
                for k in range(1, len(displacements)):
                    assert approx_equal(displacements[0], displacements[k])
                # If all of the panel groups in this level moved, no need to do anything.
                if len(displacements) != len(
                        list(
                            iterate_detector_at_level(detector.hierarchy(), 0,
                                                      i))):
                    for panel_group in iterate_detector_at_level(
                            detector.hierarchy(), 0, i):
                        test = [
                            list(detector).index(panel) in steps[j]
                            for panel in iterate_panels(panel_group)
                        ]
                        # If any of the panels in this panel group moved, no need to do anything
                        if any(test): continue

                        # None of the panels in this panel group moved in this step, so need to apply displacement from other panel
                        # groups at this level
                        fast = col(panel_group.get_local_fast_axis())
                        slow = col(panel_group.get_local_slow_axis())
                        ori = col(panel_group.get_local_origin())
                        normal = fast.cross(slow)
                        panel_group.set_local_frame(
                            fast, slow, (ori.dot(fast) * fast) +
                            (ori.dot(slow) * slow) + (normal * displacement))

                # Check the new displacements. Should be the same across all panels.
                displacements = []
                for panel_group in iterate_detector_at_level(
                        detector.hierarchy(), 0, i):
                    displacements.append(
                        col(panel_group.get_local_fast_axis()).cross(
                            col(panel_group.get_local_slow_axis())).dot(
                                col(panel_group.get_local_origin())))

                for k in range(1, len(displacements)):
                    assert approx_equal(displacements[0], displacements[k])

                dump = ExperimentListDumper(experiments)
                dump.as_json(output_experiments)

            previous_step_and_level = j, i

    output_geometry(params)
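
# Aside: a minimal, dependency-free sketch of the step-expansion bookkeeping
# used in refine_expanding above. The offsets of 16, 32 and 48 are an
# assumption about the 64-panel CSPAD numbering, where each step lists panels
# in the first quadrant and the other three quadrants mirror them.
steps = {0: [2, 3]}
quadrant_panels = {1: [0, 1], 2: [14, 15], 3: [6, 7], 4: [4, 5],
                   5: [12, 13], 6: [8, 9], 7: [10, 11]}
for s in range(1, 8):
    steps[s] = steps[s - 1] + quadrant_panels[s]

for s, panels in steps.items():
    rest = []
    for p in panels:
        rest.extend([p + 16, p + 32, p + 48])  # same panel, other quadrants
    panels.extend(rest)

assert sorted(steps[7]) == list(range(64))  # every panel refined by step 7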
Ejemplo n.º 37
0
def test_reindex(dials_regression, run_in_tmpdir):
    data_dir = os.path.join(dials_regression, "indexing_test_data",
                            "i04_weak_data")
    pickle_path = os.path.join(data_dir, "indexed.pickle")
    experiments_path = os.path.join(data_dir, "experiments.json")
    commands = [
        "dials.reindex",
        pickle_path,
        experiments_path,
        "change_of_basis_op=2a,b,c",
        "space_group=P1",
    ]
    command = " ".join(commands)
    print(command)

    result = easy_run.fully_buffered(command=command).raise_if_errors()
    old_reflections = easy_pickle.load(pickle_path)
    assert os.path.exists("reindexed.refl")
    new_reflections = easy_pickle.load("reindexed.refl")
    old_experiments = load.experiment_list(experiments_path,
                                           check_format=False)
    assert os.path.exists("reindexed.expt")
    new_experiments = load.experiment_list("reindexed.expt",
                                           check_format=False)
    h1, k1, l1 = old_reflections["miller_index"].as_vec3_double().parts()
    h2, k2, l2 = new_reflections["miller_index"].as_vec3_double().parts()
    assert 2 * h1 == pytest.approx(h2)
    assert k1 == pytest.approx(k2)
    assert l1 == pytest.approx(l2)
    old_uc_params = old_experiments[0].crystal.get_unit_cell().parameters()
    new_uc_params = new_experiments[0].crystal.get_unit_cell().parameters()
    assert new_uc_params[0] == pytest.approx(2 * old_uc_params[0])
    assert new_uc_params[1:] == pytest.approx(old_uc_params[1:])
    assert old_experiments[0].crystal.get_space_group().type().hall_symbol(
    ) == " P 1"
    assert new_experiments[0].crystal.get_space_group().type().hall_symbol(
    ) == " P 1"

    # set space group P4
    cb_op = sgtbx.change_of_basis_op("a,b,c")
    commands = [
        "dials.reindex",
        experiments_path,
        "space_group=P4",
        "change_of_basis_op=%s" % str(cb_op),
        "output.experiments=P4.expt",
    ]
    command = " ".join(commands)
    print(command)
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    # apply one of the symops from the space group
    cb_op = sgtbx.change_of_basis_op("-x,-y,z")
    commands = [
        "dials.reindex",
        "P4.expt",
        "change_of_basis_op=%s" % str(cb_op),
        "output.experiments=P4_reindexed.expt",
    ]
    command = " ".join(commands)
    print(command)
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    new_experiments1 = load.experiment_list("P4_reindexed.expt",
                                            check_format=False)
    assert new_experiments1[0].crystal.get_A() == pytest.approx(
        old_experiments[0].crystal.change_basis(cb_op).get_A())
    #
    cb_op = sgtbx.change_of_basis_op("-x,-y,z")
    commands = [
        "dials.reindex",
        "P4.expt",
        "change_of_basis_op=auto",
        "reference.experiments=P4_reindexed.expt",
        "output.experiments=P4_reindexed2.expt",
    ]
    command = " ".join(commands)
    print(command)
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    new_experiments2 = load.experiment_list("P4_reindexed2.expt",
                                            check_format=False)
    assert new_experiments1[0].crystal.get_A() == pytest.approx(
        new_experiments2[0].crystal.get_A())
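
# Aside: a pure-Python sketch of the relation the assertions above verify.
# Under the change-of-basis operator 2a,b,c the a axis doubles, so the h
# index of every reflection doubles while k and l are unchanged. The Miller
# indices below are hypothetical.
old_indices = [(1, 2, 3), (2, 0, 1), (-1, 4, 2)]
new_indices = [(2 * h, k, l) for (h, k, l) in old_indices]
assert all(h2 == 2 * h1 and k2 == k1 and l2 == l1
           for (h1, k1, l1), (h2, k2, l2) in zip(old_indices, new_indices))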
Ejemplo n.º 38
0
def test_reindex_against_reference(dials_regression, tmpdir):
    """Test the reindexing against a reference dataset functionality."""
    tmpdir.chdir()

    data_dir = os.path.join(dials_regression, "indexing_test_data",
                            "i04_weak_data")
    pickle_path = os.path.join(data_dir, "indexed.pickle")
    experiments_path = os.path.join(data_dir, "experiments.json")

    commands = [
        "dials.reindex",
        pickle_path,
        experiments_path,
        "change_of_basis_op=a,b,c",
        "space_group=P4",
        "output.reflections=P4.refl",
        "output.experiments=P4.expt",
    ]
    command = " ".join(commands)
    print(command)

    _ = easy_run.fully_buffered(command=command).raise_if_errors()
    assert os.path.exists("P4.refl")
    assert os.path.exists("P4.expt")
    new_experiments = load.experiment_list("P4.expt", check_format=False)
    assert new_experiments[0].crystal.get_space_group().type().hall_symbol(
    ) == " P 4"

    # Now have something in P4, get another dataset in a different indexing scheme

    cb_op = sgtbx.change_of_basis_op("a,-b,-c")
    commands = [
        "dials.reindex",
        "P4.refl",
        "P4.expt",
        "change_of_basis_op=%s" % str(cb_op),
        "output.experiments=P4_reindexed.expt",
        "output.reflections=P4_reindexed.refl",
    ]
    command = " ".join(commands)
    print(command)
    _ = easy_run.fully_buffered(command=command).raise_if_errors()

    # now run reference reindexing
    commands = [
        "dials.reindex",
        "P4.refl",
        "P4.expt",
        "reference.experiments=P4_reindexed.expt",
        "reference.reflections=P4_reindexed.refl",
    ]
    command = " ".join(commands)
    print(command)
    _ = easy_run.fully_buffered(command=command).raise_if_errors()

    # expect reindexed_reflections to be same as P4_reindexed, not P4_reflections
    reindexed_reflections = easy_pickle.load("reindexed.refl")
    P4_reindexed = easy_pickle.load("P4_reindexed.refl")
    P4_reflections = easy_pickle.load("P4.refl")

    h1, k1, l1 = reindexed_reflections["miller_index"].as_vec3_double().parts()
    h2, k2, l2 = P4_reindexed["miller_index"].as_vec3_double().parts()
    h3, k3, l3 = P4_reflections["miller_index"].as_vec3_double().parts()

    # hkl1 and hkl2 should be the same, as the data should have been reindexed
    # against the reference, with the program determining a reindexing operator
    # of a,-b,-c
    assert list(h1) == pytest.approx(list(h2))
    assert list(l1) == pytest.approx(list(l2))
    assert list(k1) == pytest.approx(list(k2))
    # h1 and h3 should be the same, but not l and k, as these datasets should
    # differ by a twinning operator of a,-b,-c
    assert list(h1) == pytest.approx(list(h3))
    assert list(l1) != pytest.approx(list(l3))
    assert list(k1) != pytest.approx(list(k3))
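
# Aside: a small sketch of why h matches but k and l do not in the checks
# above. The reindexing operator a,-b,-c maps (h, k, l) to (h, -k, -l),
# leaving h untouched and flipping the signs of k and l. Hypothetical data.
reflections = [(1, 2, 3), (0, 1, -2), (4, -3, 5)]
reindexed = [(h, -k, -l) for (h, k, l) in reflections]
assert [r[0] for r in reflections] == [r[0] for r in reindexed]
assert [r[1] for r in reflections] != [r[1] for r in reindexed]
assert [r[2] for r in reflections] != [r[2] for r in reindexed]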
Ejemplo n.º 39
0
def stats(model, prefix, no_ticks=True):
    # Get rid of H, multi-model, no-protein and single-atom residue models
    if (model.percent_of_single_atom_residues() > 20):
        return None
    sel = model.selection(string="protein")
    if (sel.count(True) == 0):
        return None
    ssr = "protein and not (element H or element D or resname UNX or resname UNK or resname UNL)"
    sel = model.selection(string=ssr)
    model = model.select(sel)
    if (len(model.get_hierarchy().models()) > 1):
        return None
    # Add H; this loses CRYST1!
    rr = run_reduce_with_timeout(
        stdin_lines=model.get_hierarchy().as_pdb_string().splitlines(),
        file_name=None,
        parameters="-oh -his -flip -keep -allalt -pen9999 -",
        override_auto_timeout_with=None)
    # Create model; this is a single-model pure protein with new H added
    pdb_inp = iotbx.pdb.input(source_info=None, lines=rr.stdout_lines)
    model = mmtbx.model.manager(model_input=None,
                                build_grm=True,
                                pdb_hierarchy=pdb_inp.construct_hierarchy(),
                                process_input=True,
                                log=null_out())
    box = uctbx.non_crystallographic_unit_cell_with_the_sites_in_its_center(
        sites_cart=model.get_sites_cart(), buffer_layer=5)
    model.set_sites_cart(box.sites_cart)
    model._crystal_symmetry = box.crystal_symmetry()
    #
    N = 10
    SS = get_ss_selections(hierarchy=model.get_hierarchy())
    HB_all = find(
        model=model.select(flex.bool(model.size(), True)),
        a_DHA_cutoff=90).get_params_as_arrays(replace_with_empty_threshold=N)
    HB_alpha = find(
        model=model.select(SS.both.h_sel),
        a_DHA_cutoff=90).get_params_as_arrays(replace_with_empty_threshold=N)
    HB_beta = find(
        model=model.select(SS.both.s_sel),
        a_DHA_cutoff=90).get_params_as_arrays(replace_with_empty_threshold=N)
    print(HB_all.d_HA.size())
    result_dict = {}
    result_dict["all"] = HB_all
    result_dict["alpha"] = HB_alpha
    result_dict["beta"] = HB_beta
    #  result_dict["loop"]  = get_selected(sel=loop_sel)
    # Load histograms for reference high-resolution d_HA and a_DHA
    pkl_fn = libtbx.env.find_in_repositories(
        relative_path="mmtbx") + "/nci/d_HA_and_a_DHA_high_res.pkl"
    assert os.path.isfile(pkl_fn)
    ref = easy_pickle.load(pkl_fn)
    #
    import matplotlib as mpl
    mpl.use('Agg')
    import matplotlib.pyplot as plt
    fig = plt.figure(figsize=(10, 10))
    kwargs = dict(histtype='bar', bins=20, range=[1.6, 3.0], alpha=.8)
    for j, it in enumerate([["alpha", 1], ["beta", 3], ["all", 5]]):
        key, i = it
        ax = plt.subplot(int("32%d" % i))
        if (no_ticks):
            #ax.set_xticks([])
            ax.set_yticks([])
        if (j in [0, 1]):
            ax.tick_params(bottom=False)
            ax.set_xticklabels([])
        ax.tick_params(axis="x", labelsize=12)
        ax.tick_params(axis="y", labelsize=12, left=False, pad=-2)
        ax.text(0.98,
                0.92,
                key,
                size=12,
                horizontalalignment='right',
                transform=ax.transAxes)
        HB = result_dict[key]
        if HB is None: continue
        w1 = np.ones_like(HB.d_HA) / HB.d_HA.size()
        ax.hist(HB.d_HA, color="orangered", weights=w1, rwidth=0.3, **kwargs)
        #
        start, end1, end2 = 0, max(ref.distances[key].vals), \
          round(max(ref.distances[key].vals),2)
        if (not no_ticks):
            plt.yticks([0.01, end1], ["0", end2],
                       visible=True,
                       rotation="horizontal")

        if (key == "alpha"): plt.ylim(0, end2 + 0.02)
        elif (key == "beta"): plt.ylim(0, end2 + 0.02)
        elif (key == "all"): plt.ylim(0, end2 + 0.02)
        else: assert 0
        #
        if (j == 0): ax.set_title("Distance", size=15)
        bins = list(flex.double(ref.distances[key].bins))
        ax.bar(bins, ref.distances[key].vals, alpha=.3, width=0.07)
    #
    kwargs = dict(histtype='bar', bins=20, range=[90, 180], alpha=.8)
    for j, it in enumerate([["alpha", 2], ["beta", 4], ["all", 6]]):
        key, i = it
        ax = plt.subplot(int("32%d" % i))
        if (j in [0, 1]):
            ax.tick_params(bottom=False)
            ax.set_xticklabels([])
        if (no_ticks):
            #ax.set_xticks([])
            ax.set_yticks([])
        ax.tick_params(axis="x", labelsize=12)
        ax.tick_params(axis="y", labelsize=12, left=False, pad=-2)
        ax.text(0.98,
                0.92,
                key,
                size=12,
                horizontalalignment='right',
                transform=ax.transAxes)

        HB = result_dict[key]
        if HB is None: continue
        w1 = np.ones_like(HB.a_DHA) / HB.a_DHA.size()
        ax.hist(HB.a_DHA, color="orangered", weights=w1, rwidth=0.3, **kwargs)
        #
        start, end1, end2 = 0, max(ref.angles[key].vals), \
          round(max(ref.angles[key].vals),2)
        if (not no_ticks):
            plt.yticks([0.01, end1], ["0", end2],
                       visible=True,
                       rotation="horizontal")

        if (key == "alpha"): plt.ylim(0, end2 + 0.02)
        elif (key == "beta"): plt.ylim(0, end2 + 0.02)
        elif (key == "all"): plt.ylim(0, end2 + 0.02)
        else: assert 0
        #
        if (j == 0): ax.set_title("Angle", size=15)
        ax.bar(ref.angles[key].bins, ref.angles[key].vals, width=4.5, alpha=.3)
    plt.subplots_adjust(wspace=0.12, hspace=0.025)
    if (no_ticks):
        plt.subplots_adjust(wspace=0.025, hspace=0.025)
    #fig.savefig("%s.png"%prefix, dpi=1000)
    fig.savefig("%s.pdf" % prefix)
Ejemplo n.º 40
0
  def __init__(self,
               address,
               avg_dirname=None,
               avg_basename=None,
               stddev_dirname=None,
               stddev_basename=None,
               max_dirname=None,
               max_basename=None,
               background_path=None,
               flags=None,
               hot_threshold=None,
               gain_threshold=None,
               noise_threshold=7,
               elastic_threshold=9,
               symnoise_threshold=4,
               **kwds):
    """
    @param address         Full data source address of the DAQ device
    @param avg_dirname     Directory portion of output average image
                           XXX mean
    @param avg_basename    Filename prefix of output average image XXX
                           mean
    @param flags inactive:  Eliminate the inactive pixels
                 noelastic: Eliminate elastic scattering
                 nohot:     Eliminate the hot pixels
                 nonoise:   Eliminate noisy pixels
                 symnoise:  Symmetrically eliminate noisy pixels
    @param stddev_dirname  Directory portion of output standard
                           deviation image XXX std
    @param stddev_basename Filename prefix of output standard
                           deviation image XXX std
    @param max_dirname     Directory portion of output maximum
                           projection image
    @param max_basename    Filename prefix of output maximum
                           projection image
     """

    super(average_mixin, self).__init__(
      address=address,
      **kwds
    )
    self.roi = None
    self.avg_basename = cspad_tbx.getOptString(avg_basename)
    self.avg_dirname = cspad_tbx.getOptString(avg_dirname)
    self.detector = cspad_tbx.address_split(address)[0]
    self.flags = cspad_tbx.getOptStrings(flags, default = [])
    self.stddev_basename = cspad_tbx.getOptString(stddev_basename)
    self.stddev_dirname = cspad_tbx.getOptString(stddev_dirname)
    self.max_basename = cspad_tbx.getOptString(max_basename)
    self.max_dirname = cspad_tbx.getOptString(max_dirname)
    self.background_path = cspad_tbx.getOptString(background_path)
    self.hot_threshold = cspad_tbx.getOptFloat(hot_threshold)
    self.gain_threshold = cspad_tbx.getOptFloat(gain_threshold)
    self.noise_threshold = cspad_tbx.getOptFloat(noise_threshold)
    self.elastic_threshold = cspad_tbx.getOptFloat(elastic_threshold)
    self.symnoise_threshold = cspad_tbx.getOptFloat(symnoise_threshold)

    if background_path is not None:
      background_dict = easy_pickle.load(background_path)
      self.background_img = background_dict['DATA']

    self._have_max = self.max_basename is not None or \
                     self.max_dirname is not None
    self._have_mean = self.avg_basename is not None or \
                      self.avg_dirname is not None
    self._have_std = self.stddev_basename is not None or \
                     self.stddev_dirname is not None

    # Start a server process which holds a set of Python objects that
    # other processes can manipulate using proxies.  The queues will
    # be used in endjob() to pass images between the worker processes,
    # and the lock will ensure the transfer is treated as a critical
    # section.  There is therefore the risk of a hang if the queues
    # cannot hold all the data one process will supply before another
    # empties it.
    #
    # In an attempt to alleviate this issue, separate queues are used
    # for the potentially big images.  The hope is to prevent
    # producers from blocking while consumers are locked out by using
    # more buffers.
    mgr = multiprocessing.Manager()
    self._lock = mgr.Lock()
    self._metadata = mgr.dict()
    self._queue_max = mgr.Queue()
    self._queue_sum = mgr.Queue()
    self._queue_ssq = mgr.Queue()
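
# Aside: a stripped-down, standard-library sketch of the Manager/queue
# pattern described in the comment above: worker processes push partial
# results through a Manager-backed queue, with a lock guarding the hand-off.
# The workers here just sum chunks of integers.
import multiprocessing

def _worker(chunk, queue, lock):
    partial = sum(chunk)
    with lock:  # treat the transfer as a critical section
        queue.put(partial)

if __name__ == '__main__':
    mgr = multiprocessing.Manager()
    queue = mgr.Queue()
    lock = mgr.Lock()
    chunks = [range(0, 50), range(50, 100)]
    procs = [multiprocessing.Process(target=_worker, args=(c, queue, lock))
             for c in chunks]
    for p in procs: p.start()
    for p in procs: p.join()
    assert sum(queue.get() for _ in procs) == sum(range(100))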
Ejemplo n.º 41
0
 def read_object_file(self, filepath):
     try:
         obj = ep.load(filepath)
         return obj
     except EOFError:
         pass
Ejemplo n.º 42
0
def run (args, source_data = None) :
  from xfel import radial_average
  from scitbx.array_family import flex
  from iotbx.detectors.cspad_detector_formats import reverse_timestamp
  from iotbx.detectors.cspad_detector_formats import detector_format_version as detector_format_function
  from spotfinder.applications.xfel import cxi_phil
  from iotbx.detectors.npy import NpyImage
  import os, sys

  user_phil = []
  # TODO: replace this stuff with iotbx.phil.process_command_line_with_files
  # as soon as I can safely modify it
  for arg in args :
    if (not "=" in arg) :
      try :
        user_phil.append(libtbx.phil.parse("""file_path=%s""" % arg))
      except ValueError as e :
        raise Sorry("Unrecognized argument '%s'" % arg)
    else :
      try :
        user_phil.append(libtbx.phil.parse(arg))
      except RuntimeError as e :
        raise Sorry("Unrecognized argument '%s' (error: %s)" % (arg, str(e)))
  params = master_phil.fetch(sources=user_phil).extract()
  if params.file_path is None or not os.path.isfile(params.file_path) and source_data is None:
    master_phil.show()
    raise Usage("file_path must be defined (either file_path=XXX, or the path alone).")
  assert params.handedness is not None
  assert params.n_bins is not None
  assert params.verbose is not None
  assert params.output_bins is not None

  if source_data is None:
    from libtbx import easy_pickle
    source_data = easy_pickle.load(params.file_path)

  if params.output_file is None:
    logger = sys.stdout
  else:
    logger = open(params.output_file, 'w')
    logger.write("%s "%params.output_file)

  if not "DETECTOR_ADDRESS" in source_data:
    # legacy format; try to guess the address
    LCLS_detector_address = 'CxiDs1-0|Cspad-0'
    if "DISTANCE" in source_data and source_data["DISTANCE"] > 1000:
      # downstream CS-PAD detector station of CXI instrument
      LCLS_detector_address = 'CxiDsd-0|Cspad-0'
  else:
    LCLS_detector_address = source_data["DETECTOR_ADDRESS"]
  timesec = reverse_timestamp( source_data["TIMESTAMP"] )[0]
  version_lookup = detector_format_function(LCLS_detector_address,timesec)
  args = [
          "distl.detector_format_version=%s"%version_lookup,
          "viewer.powder_arcs.show=False",
          "viewer.powder_arcs.code=3n9c",
         ]

  horizons_phil = cxi_phil.cxi_versioned_extract(args).persist.commands

  img = NpyImage(params.file_path, source_data)
  img.readHeader(horizons_phil)
  img.translate_tiles(horizons_phil)
  if params.verbose:
    img.show_header()

  the_tiles = img.get_tile_manager(horizons_phil).effective_tiling_as_flex_int(
        reapply_peripheral_margin=False,encode_inactive_as_zeroes=True)

  if params.beam_x is None:
    params.beam_x = img.beamx / img.pixel_size
  if params.beam_y is None:
    params.beam_y = img.beamy / img.pixel_size
  if params.verbose:
    logger.write("I think the beam center is (%s,%s)\n"%(params.beam_x, params.beam_y))

  bc = (int(params.beam_x),int(params.beam_y))

  extent = int(math.ceil(max(distance((0,0),bc),
                             distance((img.size1,0),bc),
                             distance((0,img.size2),bc),
                             distance((img.size1,img.size2),bc))))

  if params.n_bins < extent:
    params.n_bins = extent

  extent_in_mm = extent * img.pixel_size
  extent_two_theta = math.atan(extent_in_mm/img.distance)*180/math.pi

  sums    = flex.double(params.n_bins) * 0
  sums_sq = flex.double(params.n_bins) * 0
  counts  = flex.int(params.n_bins) * 0
  data    = img.get_raw_data()

  if hasattr(data,"as_double"):
    data = data.as_double()

  logger.write("Average intensity: %9.3f\n"%flex.mean(data))

  if params.verbose:
    logger.write("Generating average...tile:")
    logger.flush()
  for tile in range(len(the_tiles)//4):
    if params.verbose:
      logger.write(" %d"%tile)
      logger.flush()

    x1,y1,x2,y2 = get_tile_coords(the_tiles,tile)

    radial_average(data,bc,sums,sums_sq,counts,img.pixel_size,img.distance,
                   (x1,y1),(x2,y2))

  if params.verbose:
    logger.write(" Finishing...\n")

  # average, avoiding division by zero
  results = sums.set_selected(counts <= 0, 0)
  results /= counts.set_selected(counts <= 0, 1).as_double()

  # calculate standard deviations
  std_devs = [math.sqrt((sums_sq[i]-sums[i]*results[i])/counts[i])
              if counts[i] > 0 else 0 for i in range(len(sums))]

  xvals = flex.double(len(results))
  max_twotheta = float('-inf')
  max_result   = float('-inf')

  for i in range(len(results)):
    twotheta = i * extent_two_theta/params.n_bins
    xvals[i] = twotheta

    if params.output_bins and "%.3f"%results[i] != "nan":
     #logger.write("%9.3f %9.3f\n"%     (twotheta,results[i]))        #.xy  format for Rex.cell.
      logger.write("%9.3f %9.3f %9.3f\n"%(twotheta,results[i],std_devs[i])) #.xye format for GSASII
     #logger.write("%.3f %.3f %.3f\n"%(twotheta,results[i],ds[i]))  # include calculated d spacings
    if results[i] > max_result:
      max_twotheta = twotheta
      max_result = results[i]

  logger.write("Maximum 2theta for %s, TS %s: %f, value: %f\n"%(params.file_path, source_data['TIMESTAMP'], max_twotheta, max_result))

  if params.verbose:
    from pylab import scatter, show, xlabel, ylabel, ylim
    scatter(xvals,results)
    xlabel("2 theta")
    ylabel("Avg ADUs")
    if params.plot_y_max is not None:
      ylim(0, params.plot_y_max)
    show()

  return xvals, results
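
# Aside: a compact numpy sketch of the radial averaging done above. Every
# pixel is binned by its integer distance from the beam center; sums,
# squared sums and counts are accumulated per bin, and the division guards
# against empty bins exactly as the flex code does. Synthetic image data.
import numpy as np

img = np.random.default_rng(0).poisson(10.0, size=(64, 64)).astype(float)
beam_center = (31.5, 31.5)  # (y, x), hypothetical
yy, xx = np.indices(img.shape)
r = np.hypot(yy - beam_center[0], xx - beam_center[1]).astype(int)

n_bins = r.max() + 1
sums = np.bincount(r.ravel(), weights=img.ravel(), minlength=n_bins)
sums_sq = np.bincount(r.ravel(), weights=(img ** 2).ravel(), minlength=n_bins)
counts = np.bincount(r.ravel(), minlength=n_bins)

safe = np.maximum(counts, 1)
means = np.where(counts > 0, sums / safe, 0.0)
# same variance formula as the std_devs comprehension above
std_devs = np.sqrt(np.where(counts > 0, (sums_sq - sums * means) / safe, 0.0))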
Ejemplo n.º 43
0
    def onRecovery(self, e):
        # Find finished runs and display results
        int_folder = os.path.abspath('{}/integration'.format(os.curdir))

        if not os.path.isdir(int_folder):
            open_dlg = wx.DirDialog(self,
                                    "Choose the integration folder:",
                                    style=wx.DD_DEFAULT_STYLE)
            if open_dlg.ShowModal() == wx.ID_OK:
                int_folder = open_dlg.GetPath()
                open_dlg.Destroy()
            else:
                open_dlg.Destroy()
                return

        paths = [os.path.join(int_folder, p) for p in os.listdir(int_folder)]
        paths = [p for p in paths if os.path.isdir(p)]

        path_dlg = dlg.RecoveryDialog(self)
        path_dlg.insert_paths(paths)

        if path_dlg.ShowModal() == wx.ID_OK:
            self.reset_settings()
            selected = path_dlg.selected
            recovery_mode = path_dlg.recovery_mode
            int_path = selected[1]

            init_file = os.path.join(int_path, 'init.cfg')

            if os.path.isfile(init_file):
                rec_init = ep.load(init_file)
                tmp_phil = inp.master_phil.format(
                    python_object=rec_init.params)
                self.iota_phil = self.iota_phil.fetch(source=tmp_phil)
            else:
                rec_init = InitAll(iver=iota_version)
                rec_init.int_base = int_path
                rec_init.obj_base = os.path.join(int_path, 'image_objects')
                rec_init.fin_base = os.path.join(int_path, 'final')
                rec_init.log_base = os.path.join(int_path, 'logs')
                rec_init.viz_base = os.path.join(int_path, 'visualization')
                rec_init.logfile = os.path.join(int_path, 'iota.log')
                with open(rec_init.logfile, 'r') as lf:
                    lines = lf.readlines()[4:86]
                    log_phil = ip.parse(''.join(lines))
                self.iota_phil = self.iota_phil.fetch(source=log_phil)
                rec_init.params = self.iota_phil.extract()
                input_entries = [i for i in rec_init.params.input if i is not None]
                rec_init.input_list = ginp.make_input_list(input_entries)

            self.gparams = self.iota_phil.extract()

            # Re-populate input window with settings from read-in run (check that
            # nothing has been moved)
            rec_target_phil_file = os.path.join(rec_init.int_base,
                                                'target.phil')
            with open(rec_target_phil_file, 'r') as pf:
                rec_target_phil = pf.read()
            self.target_phil = rec_target_phil
            self.update_input_window()

            # Re-open processing window with results of the run
            if recovery_mode == 0:
                self.proc_window = frm.ProcWindow(self,
                                                  -1,
                                                  title='Image Processing',
                                                  target_phil=rec_target_phil,
                                                  phil=self.iota_phil)
                self.proc_window.recover(int_path=rec_init.int_base,
                                         init=rec_init,
                                         status=selected[0],
                                         params=self.gparams)
                self.proc_window.Show(True)
Ejemplo n.º 44
0
def make_png(image_pickle,
             integration_pickle,
             file_name=None,
             res=600,
             show_spots=True):
    """ Write a png file visualizing integration results.
  :param image_pickle: path to image pickle file.
  :param integration_pickle: path to integration pickle file.
  :param res: resolution of output file in dpi (6x6 image size).
  :param show_spots: Uses the `obsspot` field from the `correction_vector` key to plot spot positions. Fails silently if `correction_vector` does not exist. Use option indexing.verbose_cv=True in cxi.index to make sure that this is created.
  :param file_name: desired output file name. Defaults to the integration_pickle name.
  """

    if file_name is None:
        import os
        # Change extension of `image_pickle` to .png
        file_name = os.path.splitext(integration_pickle)[0] + ".png"

    # Load image pickle, and convert to image
    img_dict = ep.load(image_pickle)
    img_data = img_dict['DATA'].as_numpy_array()

    # Load integration pickle, and get coordinates of predictions
    int_d = ep.load(integration_pickle)
    predictions = int_d["mapped_predictions"][0]
    pred_coords = predictions.as_double().as_numpy_array() \
                             .reshape(2, len(predictions), order='F')

    if show_spots and 'correction_vectors' in int_d:
        spot_coords = [x['obsspot'] for x in int_d['correction_vectors'][0]]
        spot_coords = np.array(spot_coords).transpose()
        plot_spots = True
    else:
        plot_spots = False

    # Get some other useful info from integration pickle
    point_group = int_d['pointgroup']
    unit_cell = int_d['current_orientation'][0].unit_cell().parameters()
    unit_cell = ', '.join("{:.1f}".format(u) for u in unit_cell)
    mosaicity = int_d['mosaicity']

    # Create the figure
    import matplotlib.pyplot as plt
    fig = plt.figure(figsize=(6, 6))

    # 1st set of axes for the image
    ax = fig.add_subplot(1, 1, 1)
    ax.set_xlim(0, len(img_data[1]))
    ax.set_ylim(0, len(img_data[0]))
    ax.set_aspect('equal')

    # To display image properly, need to set img limit to 0.01th percentile
    # a few maxed-out pixels will make the entire image appear blank white
    clim = (img_data.min(), np.percentile(img_data, 99.99))
    ax.imshow(img_data, origin=None, cmap='Greys', clim=clim)

    # 2nd set of axes for the predictions
    ax2 = fig.add_axes(ax.get_position(), frameon=False)  # superimposed axes
    ax2.set_xlim(0, len(img_data[1]))
    ax2.set_ylim(0, len(img_data[0]))
    ax2.set_aspect('equal')

    ax2.plot(pred_coords[1],
             pred_coords[0],
             ms=2,
             linestyle='',  # markers only; '.' is not a valid linestyle
             marker='o',
             alpha=0.3,
             markeredgecolor='r',
             markerfacecolor='none',
             markeredgewidth=0.3)

    if plot_spots:
        ax2.plot(spot_coords[1],
                 spot_coords[0],
                 ms=2,
                 linestyle='',
                 marker='d',
                 alpha=0.3,
                 markeredgecolor='b',
                 markerfacecolor='none',
                 markeredgewidth=0.3,
                 linewidth=1)


    plt.title("Unit cell: {} ({}) \nNominal mosaicity: {}" \
              .format(point_group, unit_cell, mosaicity))
    plt.savefig(file_name, dpi=res, format='png')
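
# Aside: a short sketch of the display-range trick noted in the comment
# above. Clipping the color scale at the 99.99th percentile keeps a few
# saturated pixels from washing out the whole image. Synthetic data;
# requires numpy and matplotlib.
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt

img = np.random.default_rng(1).poisson(5.0, size=(128, 128)).astype(float)
img[0, 0] = 1e6  # one maxed-out pixel would otherwise dominate the scale
clim = (img.min(), np.percentile(img, 99.99))
fig, ax = plt.subplots()
ax.imshow(img, cmap='Greys', clim=clim)
fig.savefig('clim_sketch.png', dpi=150)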
Ejemplo n.º 45
0
def refine_hierarchical(params, merged_scope, combine_phil):
    if params.panel_filter is not None:
        from libtbx import easy_pickle
        print "Filtering out all reflections except those on panels %s" % (
            ", ".join(["%d" % p for p in params.panel_filter]))
        combined_path = "%s_combined_reflections.pickle" % params.tag
        data = easy_pickle.load(combined_path)
        sel = None
        for panel_id in params.panel_filter:
            if sel is None:
                sel = data['panel'] == panel_id
            else:
                sel |= data['panel'] == panel_id
        print "Retaining", len(
            data.select(sel)), "out of", len(data), "reflections"
        easy_pickle.dump(combined_path, data.select(sel))

    for i in range(params.start_at_hierarchy_level,
                   params.refine_to_hierarchy_level + 1):
        if params.rmsd_filter.enable:
            input_name = "filtered"
        else:
            if i == params.start_at_hierarchy_level:
                input_name = "combined"
            else:
                input_name = "refined"

        if params.rmsd_filter.enable:
            command = "cctbx.xfel.filter_experiments_by_rmsd %s %s output.filtered_experiments=%s output.filtered_reflections=%s"
            if i == params.start_at_hierarchy_level:
                command = command % (
                    "%s_combined_experiments.json" % params.tag,
                    "%s_combined_reflections.pickle" % params.tag,
                    "%s_filtered_experiments.json" % params.tag,
                    "%s_filtered_reflections.pickle" % params.tag)
            else:
                command = command % ("%s_refined_experiments_level%d.json" %
                                     (params.tag, i - 1),
                                     "%s_refined_reflections_level%d.pickle" %
                                     (params.tag, i - 1),
                                     "%s_filtered_experiments_level%d.json" %
                                     (params.tag, i - 1),
                                     "%s_filtered_reflections_level%d.pickle" %
                                     (params.tag, i - 1))
            command += " iqr_multiplier=%f" % params.rmsd_filter.iqr_multiplier
            print(command)
            result = easy_run.fully_buffered(command=command).raise_if_errors()
            result.show_stdout()

        print "Refining at hierarchy level", i
        refine_phil_file = "%s_refine_level%d.phil" % (params.tag, i)
        if i == 0:
            fix_list = ['Tau1']  # fix detector rotz
            if not params.refine_distance:
                fix_list.append('Dist')
            if params.flat_refinement:
                fix_list.extend(['Tau2', 'Tau3'])

            diff_phil = "refinement.parameterisation.detector.fix_list=%s\n" % ",".join(
                fix_list)
            if params.refine_energy:
                diff_phil += " refinement.parameterisation.beam.fix=in_spindle_plane+out_spindle_plane\n"  # allow energy to refine
        else:
            # Note, always need to fix something, so pick a panel group and fix its Tau1 (rotation around Z) always
            if params.flat_refinement and params.flat_refinement_with_distance:
                diff_phil = "refinement.parameterisation.detector.fix_list=Group1Tau1,Tau2,Tau3\n"  # refine distance, rotz and xy translation
                diff_phil += "refinement.parameterisation.detector.constraints.parameter=Dist\n"  # constrain distance to be refined identically for all panels at this hierarchy level
            elif params.flat_refinement:
                diff_phil = "refinement.parameterisation.detector.fix_list=Dist,Group1Tau1,Tau2,Tau3\n"  # refine only rotz and xy translation
            else:
                diff_phil = "refinement.parameterisation.detector.fix_list=Group1Tau1\n"  # refine almost everything

        if i == params.start_at_hierarchy_level:
            command = "dials.refine %s %s_%s_experiments.json %s_%s_reflections.pickle" % (
                refine_phil_file, params.tag, input_name, params.tag,
                input_name)
        else:
            command = "dials.refine %s %s_%s_experiments_level%d.json %s_%s_reflections_level%d.pickle" % (
                refine_phil_file, params.tag, input_name, i - 1, params.tag,
                input_name, i - 1)

        diff_phil += "refinement.parameterisation.detector.hierarchy_level=%d\n" % i

        command += " output.experiments=%s_refined_experiments_level%d.json output.reflections=%s_refined_reflections_level%d.pickle"%( \
          params.tag, i, params.tag, i)

        scope = merged_scope.fetch(parse(diff_phil))
        f = open(refine_phil_file, 'w')
        f.write(refine_scope.fetch_diff(scope).as_str())
        f.close()

        print(command)
        result = easy_run.fully_buffered(command=command).raise_if_errors()
        result.show_stdout()

    output_geometry(params)
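
# Aside: a minimal sketch (requires libtbx from cctbx) of the phil
# round-trip both refinement drivers rely on: fetch the overrides into the
# master scope, then write only the diff for dials.refine to pick up. The
# parameter names below are toy ones, not the real dials refinement scope.
from libtbx.phil import parse

master_scope = parse("""
refinement {
  hierarchy_level = 0
  fix_list = None
}
""")
diff_phil = "refinement.hierarchy_level=2\n"
scope = master_scope.fetch(parse(diff_phil))
with open("toy_refine.phil", "w") as f:
    f.write(master_scope.fetch_diff(scope).as_str())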
Ejemplo n.º 46
0
def load_data():
    codes = easy_pickle.load(prefix + 'pisa.codes')
    codes = flex.std_string(codes)
    moments = easy_pickle.load(prefix + 'pisa.nlm')
    return codes, moments
Ejemplo n.º 47
0
    def __init__(self, models, log):
        db_path = libtbx.env.find_in_repositories(
            relative_path="chem_data/rama_z/top8000_rama_z_dict.pkl",
            test=os.path.isfile)
        self.log = log
        # this takes ~0.15 seconds, so I don't see a need to cache it somehow.
        self.db = easy_pickle.load(db_path)

        # =========================================================================
        # change keys in pickle to Python 3 string
        # very temporary fix until pickle is updated
        if sys.version_info.major == 3:
            from libtbx.utils import to_str
            for key in list(self.db.keys()):
                self.db[to_str(key)] = self.db[key]
                for subkey in list(self.db[key].keys()):
                    self.db[to_str(key)][to_str(subkey)] = self.db[key][subkey]
        # =========================================================================

        self.calibration_values = {
            'H': (-0.045355950779513175, 0.1951165524439217),
            'S': (-0.0425581278436754, 0.20068584887814633),
            'L': (-0.018457764754231075, 0.15788374669456848),
            'W': (-0.016806654295023003, 0.12044960331869274)
        }
        self.residue_counts = {"H": 0, "S": 0, "L": 0}
        self.z_score = {"H": None, "S": None, "L": None, 'W': None}
        self.means = {"H": {}, "S": {}, "L": {}}
        self.stds = {"H": {}, "S": {}, "L": {}}

        self.phi_step = 4
        self.psi_step = 4
        self.n_phi_half = 45
        self.n_psi_half = 45

        # this is needed to disable e.g. selection functionality when
        # multiple models are present
        self.n_models = len(models)
        self.res_info = []
        for model in models:
            if model.get_hierarchy().models_size() > 1:
                hierarchy = iotbx.pdb.hierarchy.root()
                m = model.get_hierarchy().models()[0].detached_copy()
                hierarchy.append_model(m)
                asc = hierarchy.atom_selection_cache()
            else:
                hierarchy = model.get_hierarchy()
                asc = model.get_atom_selection_cache()
            sec_str_master_phil = iotbx.phil.parse(sec_str_master_phil_str)
            ss_params = sec_str_master_phil.fetch().extract()
            ss_params.secondary_structure.protein.search_method = "from_ca"
            ss_params.secondary_structure.from_ca_conservative = True

            ssm = ss_manager(
                hierarchy,
                atom_selection_cache=asc,
                geometry_restraints_manager=None,
                sec_str_from_pdb_file=None,
                # params=None,
                params=ss_params.secondary_structure,
                was_initialized=False,
                mon_lib_srv=None,
                verbose=-1,
                log=null_out(),
                # log=sys.stdout,
            )

            filtered_ann = ssm.actual_sec_str.deep_copy()
            filtered_ann.remove_short_annotations(
                helix_min_len=4,
                sheet_min_len=4,
                keep_one_stranded_sheets=True)
            self.helix_sel = asc.selection(
                filtered_ann.overall_helices_selection())
            self.sheet_sel = asc.selection(
                filtered_ann.overall_sheets_selection())

            used_atoms = set()
            for three in generate_protein_threes(hierarchy=hierarchy,
                                                 geometry=None):
                main_residue = three[1]
                phi_psi_atoms = three.get_phi_psi_atoms()
                if phi_psi_atoms is None:
                    continue
                phi_atoms, psi_atoms = phi_psi_atoms
                key = [x.i_seq for x in phi_atoms] + [psi_atoms[-1].i_seq]
                key = "%s" % key
                if key not in used_atoms:
                    phi, psi = three.get_phi_psi_angles()
                    rkey = three.get_ramalyze_key()
                    resname = main_residue.resname
                    ss_type = self._figure_out_ss(three)
                    self.res_info.append(
                        ["", rkey, resname, ss_type, phi, psi])
                    self.residue_counts[ss_type] += 1
                    used_atoms.add(key)
        self.residue_counts["W"] = self.residue_counts[
            "H"] + self.residue_counts["S"] + self.residue_counts["L"]
Ejemplo n.º 48
0
def find_relatives(ids, cc_min, cc_max, rmax, codes, moments, nmax=10):
    indices = flex.int()
    with open('id_list.txt', 'r') as idlist:
        for line in idlist:
            pdb_id = line[0:4]
            indices.append(flex.first_index(codes, pdb_id))
    r_max = easy_pickle.load(prefix + 'pisa.rmax')
    nns = easy_pickle.load(prefix + 'pisa.nn')
    nn_array = math.nl_array(nmax)
    nn_indx = nn_array.nl()
    nn_total = nn_indx.size()
    q_array = flex.double(range(501)) / 2000.0

    ref_nlm_array = math.nlm_array(nmax)
    target_nlm_array = math.nlm_array(nmax)
    nlm = ref_nlm_array.nlm()
    coef_size = nlm.size()
    all_indices = range(codes.size())

    small_q_array = flex.double(range(51)) / 300.0
    mean = []
    sig = []
    for indx in indices:
        print(indx)
        #rmax = 50.0 #r_max[indx]
        ref_coef = moments[indx]
        ref_nlm_array.load_coefs(nlm, ref_coef[0:coef_size])
        z_model = zernike_model(ref_nlm_array, q_array, rmax, nmax)
        out_name = codes[indx] + "_.qi"
        nn_array.load_coefs(nn_indx, nns[indx][0:nn_total])
        ref_int = put_intensity(z_model, q_array, nn_array, out_name)
        mean_r = ref_int * 0.0
        sig_r = ref_int * 0.0
        small_z_model = zernike_model(ref_nlm_array, small_q_array, rmax, nmax)
        small_ref_int = small_z_model.calc_intensity(nn_array)
        small_ref_int = small_ref_int / small_ref_int[0]
        N = 0.0
        for coef, ii in zip(moments, all_indices):
            if N > 25: break
            target_nlm_array.load_coefs(nlm, coef[0:coef_size])
            align_obj = fft_align.align(ref_nlm_array,
                                        target_nlm_array,
                                        nmax=nmax,
                                        topn=10,
                                        refine=False)
            cc = align_obj.get_cc()
            if (cc >= cc_min and cc <= cc_max):
                N += 1
                nn_array.load_coefs(nn_indx, nns[ii][0:nn_total])
                opt_r_obj = optimize_r(nn_array, small_ref_int, small_q_array,
                                       nmax)
                opt_r = gss(opt_r_obj.target, rmax * 0.8, rmax * 1.2)
                z_model = zernike_model(ref_nlm_array, q_array, opt_r, nmax)
                out_name = codes[indx] + "_" + codes[ii] + ".qi.rel"
                mod_int = put_intensity(z_model, q_array, nn_array, out_name,
                                        ref_int)
                out_name = codes[indx] + "_" + codes[ii] + ".qi"
                put_intensity(z_model, q_array, nn_array, out_name)
                mod_int = mod_int - 1.0
                mean_r += mod_int
                sig_r += mod_int * mod_int
                print(ii, cc, codes[ii], opt_r)
        if N > 3:
            mean_r /= N
            sig_r = sig_r / N - mean_r * mean_r
            mean.append(mean_r)
            sig.append(sig_r)

    N = len(mean)
    if N > 0:
        mean_r = mean[0] * 0.0
        s_r = mean[0] * 0.0
        for uu in range(N):
            mean_r += mean[uu]
            s_r += sig[uu]
        mean_r /= N
        s_r /= N
        s_r = flex.sqrt(s_r)
        with open('q_m_s_%s.dat' % rmax, 'w') as f:
            for q, m, s in zip(q_array, mean_r, s_r):
                print(q, m, s, file=f)
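
# Aside: the gss() call above is a golden-section search over candidate
# radii. Below is a minimal, generic golden-section minimizer for a
# unimodal function on [a, b]; it is a sketch of the technique, not the
# project's own gss implementation.
import math

def golden_section_minimize(f, a, b, tol=1e-5):
    invphi = (math.sqrt(5.0) - 1.0) / 2.0  # 1/phi ~ 0.618
    c, d = b - invphi * (b - a), a + invphi * (b - a)
    while abs(b - a) > tol:
        if f(c) < f(d):
            b, d = d, c
            c = b - invphi * (b - a)
        else:
            a, c = c, d
            d = a + invphi * (b - a)
    return 0.5 * (a + b)

rmax = 50.0  # hypothetical reference radius
opt_r = golden_section_minimize(lambda r: (r - 48.0) ** 2,
                                rmax * 0.8, rmax * 1.2)
assert abs(opt_r - 48.0) < 1e-3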
Ejemplo n.º 49
0
      metro_style = "cbf"

      from xfel.cftbx.detector.cspad_cbf_tbx import cbf_file_to_basis_dict
      metro = cbf_file_to_basis_dict(params.new_metrology)

    else:
      metro_style = "flatfile"

      from xfel.cftbx.detector.cspad_cbf_tbx import read_optical_metrology_from_flat_file, asic_dimension, asic_gap

      metro = read_optical_metrology_from_flat_file(params.new_metrology, params.detector, img['PIXEL_SIZE'],
                                                    asic_dimension, asic_gap, plot=params.plot)

  for filename in params.pickle_file:
    # Read the pickle file and pull the tiles out of it
    img = easy_pickle.load(filename)

    tiles = {}
    asics = {}
    data = img['DATA']

    if os.path.isdir(params.old_metrology):
      num_sections = len(sections)

      for p in range(num_sections):
        for s in range(len(sections[p])):

          # Pull the sensor block from the image, and rotate it back to
          # the "lying down" convention.
          c = sections[p][s].corners_asic()
          k = (int(round(-sections[p][s].angle / 90.0)) + 1) % 4
Ejemplo n.º 50
0
    def get_folders(self):
        pdb_code_list = list()
        skip_data = dict()
        if self.params.models is not None:
            for m in self.params.models:
                pdb_code_list.append(m.pdb_code)

        size = flex.double()
        folders = list()
        for root, dirs, files in os.walk(cryodir):
            if os.path.basename(root) == 'maps_and_models': continue
            if os.path.basename(root) == 'unique': continue
            # DEBUG
            #if (len(folders)>10): break
            #
            prefix = os.path.basename(root)
            #
            pdb_code = prefix[0:4]
            if pdb_code_list:
                if pdb_code not in pdb_code_list: continue
            # Check if pickle file exists
            pkl_file = os.path.join(root, prefix + '.pkl')
            if (not os.path.isfile(pkl_file)):
                print('No pickle file for ', prefix, file=self.logger)
                skip_data[prefix] = 'No pickle file'
                continue
            pd = easy_pickle.load(pkl_file)
            # Check resolution limit
            has_min_resolution = self.check_resolution(pd=pd)
            if not has_min_resolution:
                skip_data[prefix] = 'Resolution is worse than %sA: %sA' % \
                  (self.params.resolution_limit, pd.resolution)
                continue

            top_dir = os.path.join(data_dir, prefix)
            currentMonth = datetime.datetime.now().month
            currentYear = datetime.datetime.now().year
            date_str = format(currentMonth, '02') + '_' + str(currentYear)
            dest_dir = os.path.join(top_dir, date_str)
            #self.dest_dir = dest_dir

            #dest_dir = os.path.join(data_dir, prefix)
            json_filename = os.path.join(dest_dir, pdb_code + '.json')
            if (os.path.isfile(json_filename)
                    and self.params.overwrite is False):
                with open(json_filename, 'r') as fp:
                    data = json.load(fp)

                    task_list = list(self.params.tasks)
                    if len(task_list) < 5:
                        # don't run statistics or plots if refinement was not performed
                        # (but it will run if failed)
                        if ((data['success_refinement'] is None)
                                and ('statistics' in self.params.tasks
                                     or 'plots' in self.params.tasks)
                                and 'refinement' not in self.params.tasks):
                            print('Refinement not yet performed for: ',
                                  prefix,
                                  file=self.logger)
                            continue
                        # don't run refinement if readyset failed or was not performed
                        if (not data['success_readyset']
                                and 'refinement' in self.params.tasks):
                            print('Readyset not performed or failed: ',
                                  prefix,
                                  file=self.logger)
                            continue
                        # don't run if task already performed
                        if not self.params.run_again:
                            do_continue = False
                            for task_str in [
                                    'readyset', 'refinement', 'statistics',
                                    'plots'
                            ]:
                                if task_str in self.params.tasks and data[
                                        'success_' + task_str] is True:
                                    msg = 'Task "%s" was already successfully performed for %s. \
                  \nUse keyword "run_again" to overrun previous results' % (
                                        task_str, prefix)
                                    print(msg, file=self.logger)
                                    do_continue = True
                                    break
                            if do_continue: continue
                    # if all tasks are chosen, only run if at least one failed
                    elif len(task_list) == 5:
                        _l = [data['success_' + x] for x in task_list]
                        if (_l.count(True) == 5):
                            continue
                            #raise Sorry(msg)
            # Check if other error
            if pd.error is not None:
                skip_data[prefix] = pd.error
                continue
            # Check if PDB file exists (TODO accept also only cif instances)
            pdb_filename = os.path.join(root, prefix + '.pdb')
            if (os.path.isfile(pdb_filename)):
                size.append(os.path.getsize(pdb_filename))
                folders.append(root)
            elif prefix not in skip_data.keys():
                skip_data[prefix] = 'No PDB file'

            #print(pd.resolution, pd.model_file, pd.map_file, pd.map_file_1, pd.map_file_2, pd.error)

        # sort folders according to PDB file size, smallest are processed first
        tmp = []
        for i in flex.sort_permutation(size):
            tmp.append(folders[i])
        folders = tmp[:]

        if not self.params.models:
            json_filename = os.path.join(data_dir, 'skipped.json')
            with open(json_filename, 'w') as jfp:
                json.dump(skip_data, jfp, sort_keys=True, indent=4)

        return folders
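
# Aside: a plain-Python equivalent of the flex.sort_permutation reordering
# above, which processes the smallest models first: build the permutation
# that sorts the sizes and apply it to the parallel folder list.
# Hypothetical sizes and folder names.
size = [12.0, 3.5, 7.1]
folders = ['run_c', 'run_a', 'run_b']
perm = sorted(range(len(size)), key=size.__getitem__)
folders = [folders[i] for i in perm]
assert folders == ['run_a', 'run_b', 'run_c']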
Ejemplo n.º 51
0
def parse_pickle(filename, out=sys.stdout):
    print "===== Loading Pickle: %s =====" % filename
    ringer_things = easy_pickle.load(filename)
    return process_raw_results(ringer_things, out=out)
Ejemplo n.º 52
0
    def run(self):
        #
        folders = []

        if (self.params.mode == 'queue'):
            cmd_dict = {}
            for i in [1, 2, 3, 4, 6]:
                cmd_dict[i] = list()

        if (self.params.folder is not None):
            prefix = os.path.basename(os.path.normpath(self.params.folder))
            pkl_file = os.path.join(cryodir, prefix, prefix + '.pkl')
            if (not os.path.isfile(pkl_file)):
                print('No pickle file for ', prefix, file=self.logger)
                return
            pd = easy_pickle.load(pkl_file)
            has_min_resolution = self.check_resolution(pd=pd)
            if not has_min_resolution: return
            folders = [self.params.folder]
        else:
            folders = self.get_folders()
        #
        if folders:
            print('Performing tasks: ', self.params.tasks, file=self.logger)

        n_jobs = 0
        for folder in folders:
            # -------------------------------------
            if (self.params.mode == 'individual'):
                # -------------------------------------
                #n_jobs += 1
                rr = rerefine(logger=self.logger,
                              folder=folder,
                              params=self.params)
                #
                if ('composition' in self.params.tasks):
                    rr.get_composition()
                #
                if ('readyset' in self.params.tasks):
                    rr.find_ligands_and_get_cif()
                #
                if ('refinement' in self.params.tasks):
                    rr.run_real_space_refine()
                #
                if ('statistics' in self.params.tasks):
                    rr.get_statistics()
                    rr.get_composition(run_map_box=False)
                #
                if ('plots' in self.params.tasks):
                    rr.get_plots()
                #
                rr.save_results()
            # -------------------------------------
            if (self.params.mode == 'queue'):
                # -------------------------------------
                prefix = os.path.basename(folder)
                pdb_filename = os.path.join(folder, prefix + '.pdb')
                pdb_size = os.path.getsize(pdb_filename) / (1024.0 * 1024.0)
                #
                # TODO not sure what to do with humongous models
                if (pdb_size >= 50): continue
                #
                queue_tasks = '+'.join(self.params.tasks)
                n_jobs += 1
                #if n_jobs == 100: break
                cmds = [
                    'iotbx.python', script, 'mode=individual',
                    'folder=%s' % folder,
                    'tasks=%s' % queue_tasks
                ]
                if self.params.rsr_options:
                    for param in self.params.rsr_options:
                        cmds.append("rsr_options='" + param.rsr_param + "'")
                if self.params.add_hydrogen:
                    cmds.append('add_hydrogen=True')
                if self.params.overwrite:
                    cmds.append('overwrite=True')
                cmd = " ".join(cmds)

                if (pdb_size < 5):
                    cmd_dict[1].append(cmd)
                elif (5 <= pdb_size < 10):
                    cmd_dict[2].append(cmd)
                elif (10 <= pdb_size < 15):
                    cmd_dict[3].append(cmd)
                elif (15 <= pdb_size < 20):
                    cmd_dict[4].append(cmd)
                elif (20 <= pdb_size < 50):
                    cmd_dict[6].append(cmd)

        if (self.params.mode == 'queue'):
            for threads in cmd_dict:
                commands = cmd_dict[threads]
                params_threads = [int(i) for i in self.params.threads]
                if threads not in params_threads: continue

                for command in commands[:6]:
                    print(command)

                if not self.params.machine:
                    machine = 'rebus'
                else:
                    machine = self.params.machine
                if threads > 1:
                    qsub_cmd = 'qsub -q all.q@%s -pe threaded %s' % (machine,
                                                                     threads)
                else:
                    qsub_cmd = 'qsub -q all.q@%s' % machine

                currentMonth = datetime.datetime.now().month
                currentYear = datetime.datetime.now().year
                date_str = format(currentMonth, '02') + '_' + str(currentYear)

                queue_log_dir = os.path.join(
                    data_dir, 'queue_logs_' + date_str,
                    str(datetime.date.today()) + '_' + str(threads) +
                    '_threads')
                if (not os.path.isdir(queue_log_dir)):
                    os.makedirs(queue_log_dir)
                else:
                    time_hour = str(datetime.datetime.now())[11:16]
                    time_hour = time_hour.replace(':', '_')
                    queue_log_dir = queue_log_dir + '_' + time_hour
                    os.makedirs(queue_log_dir)

                print(queue_log_dir)

                easy_qsub.run(
                    phenix_source=phenix_dir,
                    where=queue_log_dir,
                    commands=commands,
                    #qsub_cmd       = 'qsub -q all.q@%s -pe threaded 4' % machine,
                    qsub_cmd=qsub_cmd,
                    #qsub_cmd       = 'qsub -q all.q@morse',
                    js=3,
                    size_of_chunks=1)
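
The dated log-directory naming above can be factored into a small helper; a sketch under the same conventions (the helper name is illustrative):

import datetime
import os

def queue_log_dirname(base_dir, threads):
    # e.g. base_dir/queue_logs_07_2024/2024-07-15_4_threads, with the
    # current hour/minute appended if the directory already exists.
    now = datetime.datetime.now()
    date_str = format(now.month, '02') + '_' + str(now.year)
    d = os.path.join(base_dir, 'queue_logs_' + date_str,
                     str(datetime.date.today()) + '_' + str(threads) + '_threads')
    if os.path.isdir(d):
        d += '_' + str(now)[11:16].replace(':', '_')
    return d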
Ejemplo n.º 53
0
    def __init__(self,
                 pixel_histograms,
                 output_dirname=".",
                 gain_map_path=None,
                 gain_map=None,
                 method="photon_counting",
                 estimated_gain=30,
                 nproc=None,
                 photon_threshold=2 / 3,
                 roi=None,
                 run=None):
        assert method in ("sum_adu", "photon_counting")
        self.sum_img = flex.double(flex.grid(
            370, 391), 0)  # XXX define the image size some other way?
        gain_img = flex.double(self.sum_img.accessor(), 0)

        assert [gain_map, gain_map_path].count(None) > 0
        if gain_map_path is not None:
            d = easy_pickle.load(gain_map_path)
            gain_map = d["DATA"]

        two_photon_threshold = photon_threshold + 1

        mask = flex.int(self.sum_img.accessor(), 0)

        start_row = 370
        end_row = 0
        print(len(pixel_histograms.histograms))

        pixels = list(pixel_histograms.pixels())
        n_pixels = len(pixels)
        if roi is not None:
            for k, (i, j) in enumerate(reversed(pixels)):
                if (i < roi[2] or i > roi[3] or j < roi[0] or j > roi[1]):
                    del pixels[n_pixels - k - 1]

        if gain_map is None:
            fixed_func = pixel_histograms.fit_one_histogram
        else:

            def fixed_func(pixel):
                return pixel_histograms.fit_one_histogram(pixel, n_gaussians=1)

        results = None
        if nproc is None: nproc = easy_mp.Auto
        nproc = easy_mp.get_processes(nproc)
        print("nproc: ", nproc)

        stdout_and_results = easy_mp.pool_map(
            processes=nproc,
            fixed_func=fixed_func,
            args=pixels,
            func_wrapper="buffer_stdout_stderr")
        results = [r for so, r in stdout_and_results]

        gains = flex.double()

        for i, pixel in enumerate(pixels):
            start_row = min(start_row, pixel[0])
            end_row = max(end_row, pixel[0])
            n_photons = 0
            if results is None:
                # i.e. not multiprocessing
                try:
                    gaussians = pixel_histograms.fit_one_histogram(pixel)
                except RuntimeError as e:
                    print("Error fitting pixel %s" % str(pixel))
                    print(str(e))
                    mask[pixel] = 1
                    continue
            else:
                gaussians = results[i]
            hist = pixel_histograms.histograms[pixel]
            if gaussians is None:
                # Presumably the peak fitting failed in some way
                print("Skipping pixel %s" % str(pixel))
                continue
            zero_peak_diff = gaussians[0].params[1]
            if gain_map is None:
                try:
                    view_pixel_histograms.check_pixel_histogram_fit(
                        hist, gaussians)
                except view_pixel_histograms.PixelFitError as e:
                    print("PixelFitError:", str(pixel), str(e))
                    mask[pixel] = 1
                    continue
                gain = gaussians[1].params[1] - gaussians[0].params[1]
                gain_img[pixel] = gain
                gain_ratio = gain / estimated_gain
            else:
                gain = gain_map[pixel]
                if gain == 0:
                    print("bad gain!!!!!", pixel)
                    continue
                gain = 30 / gain
                gain_ratio = 1 / gain
            gains.append(gain)

            #for g in gaussians:
            #sigma = abs(g.params[2])
            #if sigma < 1 or sigma > 10:
            #print "bad sigma!!!!!", pixel, sigma
            #mask[pixel] = 1
            #continue
            if method == "sum_adu":
                sum_adu = 0
                one_photon_cutoff, two_photon_cutoff = [
                    (threshold * gain + zero_peak_diff)
                    for threshold in (photon_threshold, two_photon_threshold)
                ]
                i_one_photon_cutoff = hist.get_i_slot(one_photon_cutoff)
                slots = hist.slots().as_double()
                slot_centers = hist.slot_centers()
                slots -= gaussians[0](slot_centers)
                for j in range(i_one_photon_cutoff, len(slots)):
                    center = slot_centers[j]
                    sum_adu += slots[j] * (center - zero_peak_diff) * 30 / gain

                self.sum_img[pixel] = sum_adu
            elif method == "photon_counting":
                one_photon_cutoff, two_photon_cutoff = [
                    (threshold * gain + zero_peak_diff)
                    for threshold in (photon_threshold, two_photon_threshold)
                ]
                i_one_photon_cutoff = hist.get_i_slot(one_photon_cutoff)
                i_two_photon_cutoff = hist.get_i_slot(two_photon_cutoff)
                slots = hist.slots()
                for j in range(i_one_photon_cutoff, len(slots)):
                    if j == i_one_photon_cutoff:
                        center = hist.slot_centers()[j]
                        upper = center + 0.5 * hist.slot_width()
                        n_photons += int(
                            round((upper - one_photon_cutoff) /
                                  hist.slot_width() * slots[j]))
                    elif j == i_two_photon_cutoff:
                        center = hist.slot_centers()[j]
                        upper = center + 0.5 * hist.slot_width()
                        n_photons += 2 * int(
                            round((upper - two_photon_cutoff) /
                                  hist.slot_width() * slots[j]))
                    elif j < i_two_photon_cutoff:
                        n_photons += int(round(slots[j]))
                    else:
                        n_photons += 2 * int(round(slots[j]))
                self.sum_img[pixel] = n_photons

        stats = scitbx.math.basic_statistics(gains)
        print("gain statistics:")
        stats.show()

        mask.set_selected(self.sum_img == 0, 1)
        unbound_pixel_mask = xes_finalise.cspad_unbound_pixel_mask()
        mask.set_selected(unbound_pixel_mask > 0, 1)
        bad_pixel_mask = xes_finalise.cspad2x2_bad_pixel_mask_cxi_run7()
        mask.set_selected(bad_pixel_mask > 0, 1)

        # NOTE: the per-row zero counts computed here are discarded; this loop
        # has no effect and looks like leftover diagnostic code.
        for row in range(self.sum_img.all()[0]):
            self.sum_img[row:row + 1, :].count(0)

        spectrum_focus = self.sum_img[start_row:end_row, :]
        mask_focus = mask[start_row:end_row, :]

        spectrum_focus.set_selected(mask_focus > 0, 0)

        xes_finalise.filter_outlying_pixels(spectrum_focus, mask_focus)

        print("Number of rows: %i" % spectrum_focus.all()[0])
        print("Estimated no. photons counted: %i" % flex.sum(spectrum_focus))
        print("Number of images used: %i" %
              flex.sum(list(pixel_histograms.histograms.values())[0].slots()))

        d = cspad_tbx.dpack(
            address='CxiSc1-0|Cspad2x2-0',
            data=spectrum_focus,
            distance=1,
            ccd_image_saturation=2e8,  # XXX
        )
        if run is not None: runstr = "_%04d" % run
        else: runstr = ""
        cspad_tbx.dwritef(d, output_dirname, 'sum%s_' % runstr)

        if gain_map is None:
            gain_map = flex.double(gain_img.accessor(), 0)
            img_sel = (gain_img > 0).as_1d()
            d = cspad_tbx.dpack(address='CxiSc1-0|Cspad2x2-0',
                                data=gain_img,
                                distance=1)
            cspad_tbx.dwritef(d, output_dirname, 'raw_gain_map_')
            gain_map.as_1d().set_selected(img_sel.iselection(),
                                          1 / gain_img.as_1d().select(img_sel))
            gain_map /= flex.mean(gain_map.as_1d().select(img_sel))
            d = cspad_tbx.dpack(address='CxiSc1-0|Cspad2x2-0',
                                data=gain_map,
                                distance=1)
            cspad_tbx.dwritef(d, output_dirname, 'gain_map_')

        plot_x, plot_y = xes_finalise.output_spectrum(
            spectrum_focus.iround(),
            mask_focus=mask_focus,
            output_dirname=output_dirname,
            run=run)
        self.spectrum = (plot_x, plot_y)
        self.spectrum_focus = spectrum_focus

        xes_finalise.output_matlab_form(
            spectrum_focus, "%s/sum%s.m" % (output_dirname, runstr))
        print(output_dirname)
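
A simplified, self-contained version of the photon-counting rule used above (plain Python; the fractional handling of the cutoff slots in the real code is omitted for brevity):

def count_photons(slot_centers, slots, one_photon_cutoff, two_photon_cutoff):
    # Slots between the cutoffs count as 1 photon each; slots above the
    # two-photon cutoff count as 2 photons each.
    n_photons = 0
    for center, counts in zip(slot_centers, slots):
        if center < one_photon_cutoff:
            continue  # pedestal / zero-photon region
        elif center < two_photon_cutoff:
            n_photons += int(round(counts))
        else:
            n_photons += 2 * int(round(counts))
    return n_photons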
Ejemplo n.º 54
0
    def __init__(self,
                 path=None,
                 filename=None,
                 crystal_num=0,
                 remove_negative=False,
                 use_b=True,
                 scale=True,
                 dicti=None,
                 pixel_size=None):
        """
    Constructor for SingleFrame object, using a cctbx.xfel integration pickle.

    :param path: path to integration pickle
    :param filename: the file name alone (used as a label)
    :param crystal_num: if multiple lattices are present, the lattice number.
    :param remove_negative: Boolean for removal of negative intensities
    :param use_b: if True, initialise scale and B; if False, use only mean-intensity scaling.
    :param dicti: optional. If a dictionary is supplied here, the object is created from it rather than by reading the file specified in path, filename.
    :param pixel_size: the size of pixels in mm. Defaults to a MAR detector with a warning at debug level of logging.
    :param scale: if False, will initialise scales to G=1, B=0.


    :return: a SingleFrame object, with the following attributes:
        - `is_polarization_corrected`: Boolean flag indicating whether polarization correction has been applied
        - `miller_array`: the cctbx.miller miller array of spot intensities.
        - `mapped_predictions`: the mapped_predictions locations
        - `path`: full path to the original file
        - `name`: file-name, used as an identifier
        - `crystal_system`: the crystal system of the lattice
        - `pg`: point group of pickle
        - `uc`: Niggli unit cell as a tuple
        - `orientation`: cctbx crystal_orientation object
        - `total_i`: the total integrated intensity for this frame
        - `xbeam`: x-location of beam centre
        - `ybeam`: y-location of beam centre
        - `wavelength`: the beam wavelength
        - `spot_offset`: the mean offset between observed spots and predicted centroids. Only created if integration was performed using verbose_cv=True. Otherwise None.
        - `minus_2B`: the gradient of the ln(i) vs. sinsqtheta_over_lambda_sq plot
        - `G`: intercept of the ln(i) vs. sinsqtheta_over_lambda_sq plot
        - `log_i`: list of log_i intensities
        - `sinsqtheta_over_lambda_sq`: list of sinsqtheta_over_lambda_sq
        - `wilson_err`: standard error on the fit of ln(i) vs. sinsqtheta_over_lambda_sq
        - `miller_fullies`: a cctbx.miller array of fully recorded intensities.
    """
        if dicti is not None:
            d = dicti
        else:
            try:
                d = easy_pickle.load(path)
            except (cPickle.UnpicklingError, ValueError, EOFError, IOError):
                d = {}
                logger.warning(
                    "Could not read %s. It may not be a pickle file." % path)
        if 'observations' not in d:
            return
        try:
            if pixel_size:
                self.pixel_size = pixel_size
            else:
                logger.debug(
                    "No pixel size specified, defaulting to MAR (0.079346). "
                    "Bad times if this is not the correct detector!")
                self.pixel_size = 0.079346
            # Warn on error, but continue directory traversal.
            self.is_polarization_corrected = False
            # Miller arrays
            self.miller_array = d['observations'][crystal_num]
            self.mapped_predictions = d['mapped_predictions'][crystal_num]
            # Image pickle info
            self.path = path
            self.name = filename
            # Unit cell info
            self.crystal_system = self.miller_array.crystal_symmetry()\
              .space_group().crystal_system()
            self.pg = d['pointgroup'].replace(' ', '')  # enforce consistency
            self.uc = d['current_orientation'][crystal_num].unit_cell() \
              .niggli_cell() \
              .parameters()
            self.orientation = d['current_orientation'][crystal_num]
            # Aggregate info
            self.total_i = d['observations'][crystal_num].sum()
            self.xbeam = d['xbeam']
            self.ybeam = d['ybeam']
            self.wavelength = d['wavelength']
            self.distance = d['distance']
            if 'correction_vectors' in d:
                all_corrections = []
                for spot in d['correction_vectors'][crystal_num]:
                    dta = np.sqrt(
                        (spot['refinedcenter'][0] - spot['obscenter'][0])**2 +
                        (spot['refinedcenter'][1] - spot['obscenter'][1])**2)
                    all_corrections.append(dta)
                self.spot_offset = np.mean(all_corrections)
            else:
                self.spot_offset = None

            if remove_negative:
                self.filter_negative_intensities()

            # Do polarization correction
            self.polarization_correction()
            self.minus_2B, self.G, self.log_i, \
                self.sinsqtheta_over_lambda_sq, \
                self.wilson_err = self.init_calc_wilson(use_b)
            if not scale:
                self.minus_2B = 0
                self.G = 1
            if logger.root.level < logging.DEBUG:  # Extreme debug!
                self.plot_wilson()
            logger.debug("Extracted image {}".format(filename))
        except KeyError:
            logger.warning(
                "Could not extract point group and unit cell from %s" % path)

        self.miller_fullies = None
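
A hypothetical construction of a SingleFrame from an integration pickle; the path is a placeholder:

# 'int_0001.pickle' is a placeholder path to a cctbx.xfel integration pickle.
frame = SingleFrame(path="int_0001.pickle",
                    filename="int_0001.pickle")
if hasattr(frame, 'miller_array'):  # only set when the pickle could be parsed
    print(frame.pg, frame.uc, frame.total_i)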
Ejemplo n.º 55
0
def easy_run_plot_multirun_stats(pickle):
  from libtbx import easy_pickle
  contents = easy_pickle.load(pickle)
  stats_tuple, d_min, n_multiples, run_tags, run_statuses, minimalist, interactive, xsize, ysize, high_vis, title = contents
  print(plot_run_stats(stats_tuple, d_min, n_multiples, run_tags=run_tags, run_statuses=run_statuses, minimalist=minimalist,
    interactive=interactive, xsize=xsize, ysize=ysize, high_vis=high_vis, title=title))
Ejemplo n.º 56
0
def average(argv=None):
    if argv is None:
        argv = sys.argv[1:]

    try:
        from mpi4py import MPI
    except ImportError:
        raise Sorry("MPI not found")

    command_line = (libtbx.option_parser.option_parser(usage="""
%s [-p] -c config -x experiment -a address -r run -d detz_offset [-o outputdir] [-A averagepath] [-S stddevpath] [-M maxpath] [-n numevents] [-s skipnevents] [-v] [-m] [-b bin_size] [-X override_beam_x] [-Y override_beam_y] [-D xtc_dir] [-f] [-g gain_mask_value] [--min] [--minpath minpath]

To write image pickles use -p, otherwise the program writes CSPAD CBFs.
Writing CBFs requires the geometry to be already deployed.

Examples:
cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

Use one process on the current node to process all the events from run 25 of
experiment cxi49812, using a detz_offset of 571.

mpirun -n 16 cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

As above, using 16 cores on the current node.

bsub -a mympi -n 100 -o average.out -q psanaq cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571 -o cxi49812

As above, using the psanaq and 100 cores, putting the log in average.out and
the output images in the folder cxi49812.
""" % libtbx.env.dispatcher_name).option(
        None,
        "--as_pickle",
        "-p",
        action="store_true",
        default=False,
        dest="as_pickle",
        help="Write results as image pickle files instead of cbf files"
    ).option(
        None,
        "--raw_data",
        "-R",
        action="store_true",
        default=False,
        dest="raw_data",
        help=
        "Disable psana corrections such as dark pedestal subtraction or common mode (cbf only)"
    ).option(
        None,
        "--background_pickle",
        "-B",
        default=None,
        dest="background_pickle",
        help=""
    ).option(
        None,
        "--config",
        "-c",
        type="string",
        default=None,
        dest="config",
        metavar="PATH",
        help="psana config file"
    ).option(
        None,
        "--experiment",
        "-x",
        type="string",
        default=None,
        dest="experiment",
        help="experiment name (eg cxi84914)"
    ).option(
        None,
        "--run",
        "-r",
        type="int",
        default=None,
        dest="run",
        help="run number"
    ).option(
        None,
        "--address",
        "-a",
        type="string",
        default="CxiDs2.0:Cspad.0",
        dest="address",
        help="detector address name (eg CxiDs2.0:Cspad.0)"
    ).option(
        None,
        "--detz_offset",
        "-d",
        type="float",
        default=None,
        dest="detz_offset",
        help=
        "offset (in mm) from sample interaction region to back of CSPAD detector rail (CXI), or detector distance (XPP)"
    ).option(
        None,
        "--outputdir",
        "-o",
        type="string",
        default=".",
        dest="outputdir",
        metavar="PATH",
        help="Optional path to output directory for output files"
    ).option(
        None,
        "--averagebase",
        "-A",
        type="string",
        default="{experiment!l}_avg-r{run:04d}",
        dest="averagepath",
        metavar="PATH",
        help=
        "Path to output average image without extension. String substitution allowed"
    ).option(
        None,
        "--stddevbase",
        "-S",
        type="string",
        default="{experiment!l}_stddev-r{run:04d}",
        dest="stddevpath",
        metavar="PATH",
        help=
        "Path to output standard deviation image without extension. String substitution allowed"
    ).option(
        None,
        "--maxbase",
        "-M",
        type="string",
        default="{experiment!l}_max-r{run:04d}",
        dest="maxpath",
        metavar="PATH",
        help=
        "Path to output maximum projection image without extension. String substitution allowed"
    ).option(
        None,
        "--numevents",
        "-n",
        type="int",
        default=None,
        dest="numevents",
        help="Maximum number of events to process. Default: all"
    ).option(
        None,
        "--skipevents",
        "-s",
        type="int",
        default=0,
        dest="skipevents",
        help="Number of events in the beginning of the run to skip. Default: 0"
    ).option(
        None,
        "--verbose",
        "-v",
        action="store_true",
        default=False,
        dest="verbose",
        help="Print more information about progress"
    ).option(
        None,
        "--pickle-optical-metrology",
        "-m",
        action="store_true",
        default=False,
        dest="pickle_optical_metrology",
        help=
        "If writing pickle files, use the optical metrology in the experiment's calib directory"
    ).option(
        None,
        "--bin_size",
        "-b",
        type="int",
        default=None,
        dest="bin_size",
        help="Rayonix detector bin size"
    ).option(
        None,
        "--override_beam_x",
        "-X",
        type="float",
        default=None,
        dest="override_beam_x",
        help="Rayonix detector beam center x coordinate"
    ).option(
        None,
        "--override_beam_y",
        "-Y",
        type="float",
        default=None,
        dest="override_beam_y",
        help="Rayonix detector beam center y coordinate"
    ).option(
        None,
        "--calib_dir",
        "-C",
        type="string",
        default=None,
        dest="calib_dir",
        metavar="PATH",
        help="calibration directory"
    ).option(
        None,
        "--pickle_calib_dir",
        "-P",
        type="string",
        default=None,
        dest="pickle_calib_dir",
        metavar="PATH",
        help=
        "pickle calibration directory specification. Replaces --calib_dir functionality."
    ).option(
        None,
        "--xtc_dir",
        "-D",
        type="string",
        default=None,
        dest="xtc_dir",
        metavar="PATH",
        help="xtc stream directory"
    ).option(
        None,
        "--use_ffb",
        "-f",
        action="store_true",
        default=False,
        dest="use_ffb",
        help=
        "Use the fast feedback filesystem at LCLS. Only for the active experiment!"
    ).option(
        None,
        "--gain_mask_value",
        "-g",
        type="float",
        default=None,
        dest="gain_mask_value",
        help=
        "Ratio between low and high gain pixels, if CSPAD in mixed-gain mode. Only used in CBF averaging mode."
    ).option(
        None,
        "--min",
        None,
        action="store_true",
        default=False,
        dest="do_minimum_projection",
        help="Output a minimum projection"
    ).option(
        None,
        "--minpath",
        None,
        type="string",
        default="{experiment!l}_min-r{run:04d}",
        dest="minpath",
        metavar="PATH",
        help=
        "Path to output minimum image without extension. String substitution allowed"
    )).process(args=argv)


    if len(command_line.args) > 0 or \
        command_line.options.as_pickle is None or \
        command_line.options.experiment is None or \
        command_line.options.run is None or \
        command_line.options.address is None or \
        command_line.options.detz_offset is None or \
        command_line.options.averagepath is None or \
        command_line.options.stddevpath is None or \
        command_line.options.maxpath is None or \
        command_line.options.pickle_optical_metrology is None:
        command_line.parser.show_help()
        return

    # set this to sys.maxsize to analyze all events
    if command_line.options.numevents is None:
        maxevents = sys.maxsize
    else:
        maxevents = command_line.options.numevents

    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    size = comm.Get_size()

    if command_line.options.config is not None:
        psana.setConfigFile(command_line.options.config)
    dataset_name = "exp=%s:run=%d:smd" % (command_line.options.experiment,
                                          command_line.options.run)
    if command_line.options.xtc_dir is not None:
        if command_line.options.use_ffb:
            raise Sorry("Cannot specify the xtc_dir and use SLAC's ffb system")
        dataset_name += ":dir=%s" % command_line.options.xtc_dir
    elif command_line.options.use_ffb:
        # as ffb is only at SLAC, ok to hardcode /reg/d here
        dataset_name += ":dir=/reg/d/ffb/%s/%s/xtc" % (
            command_line.options.experiment[0:3],
            command_line.options.experiment)
    if command_line.options.calib_dir is not None:
        psana.setOption('psana.calib-dir', command_line.options.calib_dir)
    ds = psana.DataSource(dataset_name)
    address = command_line.options.address
    src = psana.Source('DetInfo(%s)' % address)
    nevent = np.array([0.])

    if command_line.options.background_pickle is not None:
        background = easy_pickle.load(
            command_line.options.background_pickle)['DATA'].as_numpy_array()

    for run in ds.runs():
        runnumber = run.run()

        if not command_line.options.as_pickle:
            psana_det = psana.Detector(address, ds.env())

        # list of all events
        if command_line.options.skipevents > 0:
            print("Skipping first %d events" % command_line.options.skipevents)
        elif "Rayonix" in command_line.options.address:
            print("Skipping first image in the Rayonix detector"
                  )  # Shuttering issue
            command_line.options.skipevents = 1

        for i, evt in enumerate(run.events()):
            if i % size != rank: continue
            if i < command_line.options.skipevents: continue
            if i >= maxevents: break
            if i % 10 == 0: print('Rank', rank, 'processing event', i)
            #print "Event #",rank*mylength+i," has id:",evt.get(EventId)
            if 'Rayonix' in command_line.options.address or 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
                data = evt.get(psana.Camera.FrameV1, src)
                if data is None:
                    print("No data")
                    continue
                data = data.data16().astype(np.float64)
            elif command_line.options.as_pickle:
                data = evt.get(psana.ndarray_float64_3, src, 'image0')
            else:
                # get numpy array, 32x185x388
                from xfel.cftbx.detector.cspad_cbf_tbx import get_psana_corrected_data
                if command_line.options.raw_data:
                    data = get_psana_corrected_data(psana_det,
                                                    evt,
                                                    use_default=False,
                                                    dark=False,
                                                    common_mode=None,
                                                    apply_gain_mask=False,
                                                    per_pixel_gain=False)
                else:
                    if command_line.options.gain_mask_value is None:
                        data = get_psana_corrected_data(psana_det,
                                                        evt,
                                                        use_default=True)
                    else:
                        data = get_psana_corrected_data(
                            psana_det,
                            evt,
                            use_default=False,
                            dark=True,
                            common_mode=None,
                            apply_gain_mask=True,
                            gain_mask_value=command_line.options.
                            gain_mask_value,
                            per_pixel_gain=False)

            if data is None:
                print("No data")
                continue

            if command_line.options.background_pickle is not None:
                data -= background

            if 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
                distance = np.array([0.0])
                wavelength = np.array([1.0])
            else:
                d = cspad_tbx.env_distance(address, run.env(),
                                           command_line.options.detz_offset)
                if d is None:
                    print("No distance, using distance",
                          command_line.options.detz_offset)
                    assert command_line.options.detz_offset is not None
                    if 'distance' not in locals():
                        distance = np.array([command_line.options.detz_offset])
                    else:
                        distance += command_line.options.detz_offset
                else:
                    if 'distance' in locals():
                        distance += d
                    else:
                        distance = np.array([float(d)])

                w = cspad_tbx.evt_wavelength(evt)
                if w is None:
                    print("No wavelength")
                    if 'wavelength' not in locals():
                        wavelength = np.array([1.0])
                else:
                    if 'wavelength' in locals():
                        wavelength += w
                    else:
                        wavelength = np.array([w])

            t = cspad_tbx.evt_time(evt)
            if t is None:
                print("No timestamp, skipping shot")
                continue
            if 'timestamp' in locals():
                timestamp += t[0] + (t[1] / 1000)
            else:
                timestamp = np.array([t[0] + (t[1] / 1000)])

            if 'sum' in locals():
                sum += data
            else:
                sum = np.array(data, copy=True)
            if 'sumsq' in locals():
                sumsq += data * data
            else:
                sumsq = data * data
            if 'maximum' in locals():
                maximum = np.maximum(maximum, data)
            else:
                maximum = np.array(data, copy=True)

            if command_line.options.do_minimum_projection:
                if 'minimum' in locals():
                    minimum = np.minimum(minimum, data)
                else:
                    minimum = np.array(data, copy=True)

            nevent += 1

    #sum the images across mpi cores
    if size > 1:
        print("Synchronizing rank", rank)
    totevent = np.zeros(nevent.shape)
    comm.Reduce(nevent, totevent)

    if rank == 0 and totevent[0] == 0:
        raise Sorry("No events found in the run")

    sumall = np.zeros(sum.shape).astype(sum.dtype)
    comm.Reduce(sum, sumall)

    sumsqall = np.zeros(sumsq.shape).astype(sumsq.dtype)
    comm.Reduce(sumsq, sumsqall)

    maxall = np.zeros(maximum.shape).astype(maximum.dtype)
    comm.Reduce(maximum, maxall, op=MPI.MAX)

    if command_line.options.do_minimum_projection:
        minall = np.zeros(minimum.shape).astype(minimum.dtype)
        comm.Reduce(minimum, minall, op=MPI.MIN)

    waveall = np.zeros(wavelength.shape).astype(wavelength.dtype)
    comm.Reduce(wavelength, waveall)

    distall = np.zeros(distance.shape).astype(distance.dtype)
    comm.Reduce(distance, distall)

    timeall = np.zeros(timestamp.shape).astype(timestamp.dtype)
    comm.Reduce(timestamp, timeall)

    if rank == 0:
        if size > 1:
            print("Synchronized")

        # Accumulating floating-point numbers introduces errors,
        # which may cause negative variances.  Since a two-pass
        # approach is unacceptable, the standard deviation is
        # clamped at zero.
        mean = sumall / float(totevent[0])
        variance = (sumsqall / float(totevent[0])) - (mean**2)
        variance[variance < 0] = 0
        stddev = np.sqrt(variance)

        wavelength = waveall[0] / totevent[0]
        distance = distall[0] / totevent[0]
        pixel_size = cspad_tbx.pixel_size
        saturated_value = cspad_tbx.cspad_saturated_value
        timestamp = timeall[0] / totevent[0]
        timestamp = (int(timestamp), timestamp % int(timestamp) * 1000)
        timestamp = cspad_tbx.evt_timestamp(timestamp)

        if command_line.options.as_pickle:
            extension = ".pickle"
        else:
            extension = ".cbf"

        dest_paths = [
            cspad_tbx.pathsubst(command_line.options.averagepath + extension,
                                evt, ds.env()),
            cspad_tbx.pathsubst(command_line.options.stddevpath + extension,
                                evt, ds.env()),
            cspad_tbx.pathsubst(command_line.options.maxpath + extension, evt,
                                ds.env())
        ]
        if command_line.options.do_minimum_projection:
            dest_paths.append(
                cspad_tbx.pathsubst(command_line.options.minpath + extension,
                                    evt, ds.env()))

        dest_paths = [
            os.path.join(command_line.options.outputdir, path)
            for path in dest_paths
        ]
        if 'Rayonix' in command_line.options.address:
            all_data = [mean, stddev, maxall]
            if command_line.options.do_minimum_projection:
                all_data.append(minall)
            from xfel.cxi.cspad_ana import rayonix_tbx
            pixel_size = rayonix_tbx.get_rayonix_pixel_size(
                command_line.options.bin_size)
            beam_center = [
                command_line.options.override_beam_x,
                command_line.options.override_beam_y
            ]
            active_areas = flex.int([0, 0, mean.shape[1], mean.shape[0]])
            split_address = cspad_tbx.address_split(address)
            old_style_address = split_address[0] + "-" + split_address[
                1] + "|" + split_address[2] + "-" + split_address[3]
            for data, path in zip(all_data, dest_paths):
                print("Saving", path)
                d = cspad_tbx.dpack(
                    active_areas=active_areas,
                    address=old_style_address,
                    beam_center_x=pixel_size * beam_center[0],
                    beam_center_y=pixel_size * beam_center[1],
                    data=flex.double(data),
                    distance=distance,
                    pixel_size=pixel_size,
                    saturated_value=rayonix_tbx.rayonix_saturated_value,
                    timestamp=timestamp,
                    wavelength=wavelength)
                easy_pickle.dump(path, d)
        elif 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
            all_data = [mean, stddev, maxall]
            split_address = cspad_tbx.address_split(address)
            old_style_address = split_address[0] + "-" + split_address[
                1] + "|" + split_address[2] + "-" + split_address[3]
            if command_line.options.do_minimum_projection:
                all_data.append(minall)
            for data, path in zip(all_data, dest_paths):
                d = cspad_tbx.dpack(address=old_style_address,
                                    data=flex.double(data),
                                    distance=distance,
                                    pixel_size=0.1,
                                    timestamp=timestamp,
                                    wavelength=wavelength)
                print("Saving", path)
                easy_pickle.dump(path, d)
        elif command_line.options.as_pickle:
            split_address = cspad_tbx.address_split(address)
            old_style_address = split_address[0] + "-" + split_address[
                1] + "|" + split_address[2] + "-" + split_address[3]

            xpp = 'xpp' in address.lower()
            if xpp:
                evt_time = cspad_tbx.evt_time(
                    evt)  # tuple of seconds, milliseconds
                timestamp = cspad_tbx.evt_timestamp(
                    evt_time)  # human readable format
                from iotbx.detectors.cspad_detector_formats import detector_format_version, reverse_timestamp
                from xfel.cxi.cspad_ana.cspad_tbx import xpp_active_areas
                version_lookup = detector_format_version(
                    old_style_address,
                    reverse_timestamp(timestamp)[0])
                assert version_lookup is not None
                active_areas = xpp_active_areas[version_lookup]['active_areas']
                beam_center = [1765 // 2, 1765 // 2]
            else:
                if command_line.options.pickle_calib_dir is not None:
                    metro_path = command_line.options.pickle_calib_dir
                elif command_line.options.pickle_optical_metrology:
                    from xfel.cftbx.detector.cspad_cbf_tbx import get_calib_file_path
                    metro_path = get_calib_file_path(run.env(), address, run)
                else:
                    metro_path = libtbx.env.find_in_repositories(
                        "xfel/metrology/CSPad/run4/CxiDs1.0_Cspad.0")
                sections = parse_calib.calib2sections(metro_path)
                beam_center, active_areas = cspad_tbx.cbcaa(
                    cspad_tbx.getConfig(address, ds.env()), sections)

            class fake_quad(object):
                def __init__(self, q, d):
                    self.q = q
                    self.d = d

                def quad(self):
                    return self.q

                def data(self):
                    return self.d

            if xpp:
                quads = [
                    fake_quad(i, mean[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                mean = cspad_tbx.image_xpp(old_style_address,
                                           None,
                                           ds.env(),
                                           active_areas,
                                           quads=quads)
                mean = flex.double(mean.astype(np.float64))

                quads = [
                    fake_quad(i, stddev[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                stddev = cspad_tbx.image_xpp(old_style_address,
                                             None,
                                             ds.env(),
                                             active_areas,
                                             quads=quads)
                stddev = flex.double(stddev.astype(np.float64))

                quads = [
                    fake_quad(i, maxall[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                maxall = cspad_tbx.image_xpp(old_style_address,
                                             None,
                                             ds.env(),
                                             active_areas,
                                             quads=quads)
                maxall = flex.double(maxall.astype(np.float64))

                if command_line.options.do_minimum_projection:
                    quads = [
                        fake_quad(i, minall[i * 8:(i + 1) * 8, :, :])
                        for i in range(4)
                    ]
                    minall = cspad_tbx.image_xpp(old_style_address,
                                                 None,
                                                 ds.env(),
                                                 active_areas,
                                                 quads=quads)
                    minall = flex.double(minall.astype(np.float64))
            else:
                quads = [
                    fake_quad(i, mean[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                mean = cspad_tbx.CsPadDetector(address,
                                               evt,
                                               ds.env(),
                                               sections,
                                               quads=quads)
                mean = flex.double(mean.astype(np.float64))

                quads = [
                    fake_quad(i, stddev[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                stddev = cspad_tbx.CsPadDetector(address,
                                                 evt,
                                                 ds.env(),
                                                 sections,
                                                 quads=quads)
                stddev = flex.double(stddev.astype(np.float64))

                quads = [
                    fake_quad(i, maxall[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                maxall = cspad_tbx.CsPadDetector(address,
                                                 evt,
                                                 ds.env(),
                                                 sections,
                                                 quads=quads)
                maxall = flex.double(maxall.astype(np.float64))

                if command_line.options.do_minimum_projection:
                    quads = [
                        fake_quad(i, minall[i * 8:(i + 1) * 8, :, :])
                        for i in range(4)
                    ]
                    minall = cspad_tbx.CsPadDetector(address,
                                                     evt,
                                                     ds.env(),
                                                     sections,
                                                     quads=quads)
                    minall = flex.double(minall.astype(np.float64))

            all_data = [mean, stddev, maxall]
            if command_line.options.do_minimum_projection:
                all_data.append(minall)

            for data, path in zip(all_data, dest_paths):
                print("Saving", path)

                d = cspad_tbx.dpack(active_areas=active_areas,
                                    address=old_style_address,
                                    beam_center_x=pixel_size * beam_center[0],
                                    beam_center_y=pixel_size * beam_center[1],
                                    data=data,
                                    distance=distance,
                                    pixel_size=pixel_size,
                                    saturated_value=saturated_value,
                                    timestamp=timestamp,
                                    wavelength=wavelength)

                easy_pickle.dump(path, d)
        else:
            # load a header only cspad cbf from the slac metrology
            from xfel.cftbx.detector import cspad_cbf_tbx
            import pycbf
            base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(
                run, address)
            if base_dxtbx is None:
                raise Sorry("Couldn't load calibration file for run %d" %
                            run.run())

            all_data = [mean, stddev, maxall]
            if command_line.options.do_minimum_projection:
                all_data.append(minall)

            for data, path in zip(all_data, dest_paths):
                print("Saving", path)
                cspad_img = cspad_cbf_tbx.format_object_from_data(
                    base_dxtbx,
                    data,
                    distance,
                    wavelength,
                    timestamp,
                    address,
                    round_to_int=False)
                cspad_img._cbf_handle.write_widefile(path.encode(), pycbf.CBF,\
                  pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)
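
The single-pass statistics above (per-rank sum and sum-of-squares, reduced over MPI, variance clamped at zero) can be illustrated in isolation with plain numpy; a minimal sketch:

import numpy as np

def mean_stddev(sumall, sumsqall, n_events):
    # Single-pass mean/stddev from accumulated sum and sum of squares.
    # Clamping guards against small negative variances from rounding error.
    mean = sumall / float(n_events)
    variance = sumsqall / float(n_events) - mean ** 2
    variance[variance < 0] = 0
    return mean, np.sqrt(variance)

data = np.random.poisson(5.0, size=(4, 10)).astype(np.float64)
m, s = mean_stddev(data.sum(axis=0), (data * data).sum(axis=0), data.shape[0])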
Ejemplo n.º 57
0
def run(args):
    phil = iotbx.phil.process_command_line(args=args,
                                           master_string=master_phil).show()
    usage = \
    """ %s input.experiment=experimentname input.run_num=N input.address=address
  """%libtbx.env.dispatcher_name

    params = phil.work.extract()
    if not os.path.exists(params.output.output_dir):
        raise Sorry("Output path not found:" + params.output.output_dir)

    if params.input.experiment is None or \
    params.input.run_num is None or \
    params.input.address is None:
        raise Usage(usage)
    # set up psana
    if params.dispatch.events_accepted or params.dispatch.events_rejected:
        assert params.input.cfg is not None
        setConfigFile(params.input.cfg)

    dataset_name = "exp=%s:run=%s:idx" % (params.input.experiment,
                                          params.input.run_num)
    ds = DataSource(dataset_name)
    src = Source('DetInfo(%s)' % params.input.address)
    # set up multiprocessing with MPI
    from mpi4py import MPI
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()  # each process in MPI has a unique id, 0-indexed
    size = comm.Get_size()  # size: number of processes running in this job

    if params.dispatch.max_events is None:
        max_events = sys.maxsize
    else:
        max_events = params.dispatch.max_events
    if params.input.dark is not None:
        dark = easy_pickle.load('%s' % params.input.dark)
    for run in ds.runs():
        times = run.times()
        if (params.dispatch.events_begin is None
                and params.dispatch.events_end is None):
            times = times[:]
        elif (params.dispatch.events_begin is not None
              and params.dispatch.events_end is None):
            times = times[params.dispatch.events_begin:]
        elif (params.dispatch.events_begin is None
              and params.dispatch.events_end is not None):
            times = times[:params.dispatch.events_end]
        elif (params.dispatch.events_begin is not None
              and params.dispatch.events_end is not None):
            times = times[params.dispatch.events_begin:params.dispatch.
                          events_end]
        nevents = min(len(times), max_events)
        # chop the list into pieces, depending on rank.  This assigns each process
        # events such that they get every Nth event, where N is the number of processes
        mytimes = [times[i] for i in range(nevents) if (i + rank) % size == 0]
        print(len(mytimes))
        #mytimes = mytimes[len(mytimes)-1000:len(mytimes)]
        totals = np.array([0.0])
        print "initial totals", totals

        for i, t in enumerate(mytimes):
            print "Event", i, "of", len(mytimes),
            evt = run.event(t)
            if params.dispatch.events_accepted or params.dispatch.events_all:
                if evt.get("skip_event") is True:
                    continue
            elif params.dispatch.events_rejected:
                if evt.get("skip_event") is False:
                    continue
            try:
                data = evt.get(Camera.FrameV1, src)
            except ValueError as e:
                src = Source('BldInfo(%s)' % params.input.address)
                data = evt.get(Bld.BldDataSpectrometerV1, src)
            if data is None:
                print "No data"
                continue
            #set default to determine FEE data type
            two_D = False
            #check attribute of data for type
            try:
                data = np.array(data.data16().astype(np.int32))
                two_D = True
            except AttributeError as e:
                data = np.array(data.hproj().astype(np.float64))

            if two_D:
                if 'dark' in locals():
                    data = data - dark
                one_D_data = np.sum(data, 0) / data.shape[0]
                two_D_data = np.double(data)
            else:
                # used to fix an underflow problem that was present in an earlier release of psana and present for LH80
                for i in range(len(data)):
                    if data[i] > 1000000000:
                        data[i] = data[i] - (2**32)
                if 'dark' in locals():
                    data = data - dark
                one_D_data = data

            totals[0] += 1
            print "total good:", totals[0]

            if 'fee_one_D' not in locals():
                fee_one_D = one_D_data
            else:
                fee_one_D += one_D_data
            if ('two_D_data' in locals() and 'fee_two_D' not in locals()):
                fee_two_D = two_D_data
            elif 'fee_two_D' in locals():
                fee_two_D += two_D_data

        acceptedtotals = np.zeros(totals.shape)
        acceptedfee1 = np.zeros(fee_one_D.shape)
        if 'fee_two_D' in locals():
            acceptedfee2 = np.zeros(fee_two_D.shape)
        print "Synchronizing rank", rank
    comm.Reduce(fee_one_D, acceptedfee1)
    comm.Reduce(totals, acceptedtotals)
    if 'acceptedfee2' in locals():
        comm.Reduce(fee_two_D, acceptedfee2)
    print "number averaged", acceptedtotals[0]
    if rank == 0:
        if acceptedtotals[0] > 0:
            acceptedfee1 /= acceptedtotals[0]
            if 'acceptedfee2' in locals():
                acceptedfee2 /= acceptedtotals[0]

        import matplotlib
        # matplotlib needs a non-interactive backend when run on the cluster
        # nodes at SLAC; not needed when working interactively at SLAC, on mac
        # or on viper
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        from pylab import savefig, close
        from matplotlib.backends.backend_pdf import PdfPages
        from matplotlib import cm

        if params.dispatch.events_accepted:
            easy_pickle.dump(
                os.path.join(
                    params.output.output_dir, "fee_avg_1_D_" +
                    'r%s' % params.input.run_num + "_accepted.pickle"),
                acceptedfee1)
            pp1 = PdfPages(
                os.path.join(
                    params.output.output_dir, "fee_avg_1_D_" +
                    'r%s' % params.input.run_num + "_accepted.pdf"))
            if 'acceptedfee2' in locals():
                easy_pickle.dump(
                    os.path.join(
                        params.output.output_dir, "fee_avg_2_D_" +
                        'r%s' % params.input.run_num + "_accepted.pickle"),
                    acceptedfee2)
                pp2 = PdfPages(
                    os.path.join(
                        params.output.output_dir, "fee_avg_2_D_" +
                        'r%s' % params.input.run_num + "_accepted.pdf"))
        if params.dispatch.events_all:
            easy_pickle.dump(
                os.path.join(
                    params.output.output_dir, "fee_avg_1_D_" +
                    'r%s' % params.input.run_num + "_all.pickle"),
                acceptedfee1)
            pp1 = PdfPages(
                os.path.join(
                    params.output.output_dir, "fee_avg_1_D_" +
                    'r%s' % params.input.run_num + "_all.pdf"))
            if 'acceptedfee2' in locals():
                easy_pickle.dump(
                    os.path.join(
                        params.output.output_dir, "fee_avg_2_D_" +
                        'r%s' % params.input.run_num + "_all.pickle"),
                    acceptedfee2)
                pp2 = PdfPages(
                    os.path.join(
                        params.output.output_dir, "fee_avg_2_D_" +
                        'r%s' % params.input.run_num + "_all.pdf"))
        if params.dispatch.events_rejected:
            easy_pickle.dump(
                os.path.join(
                    params.output.output_dir, "fee_avg_1_D_" +
                    'r%s' % params.input.run_num + "_rejected.pickle"),
                acceptedfee1)
            pp1 = PdfPages(
                os.path.join(
                    params.output.output_dir, "fee_avg_1_D_" +
                    'r%s' % params.input.run_num + "_rejected.pdf"))
            if 'acceptedfee2' in locals():
                easy_pickle.dump(
                    os.path.join(
                        params.output.output_dir, "fee_avg_2_D_" +
                        'r%s' % params.input.run_num + "_rejected.pickle"),
                    acceptedfee2)
                pp2 = PdfPages(
                    os.path.join(
                        params.output.output_dir, "fee_avg_2_D_" +
                        'r%s' % params.input.run_num + "_rejected.pdf"))
        print "Done"
        #plotting result
        # matplotlib needs a different backend when run on the cluster nodes at SLAC
        # these two lines not needed when working interactively at SLAC, or on mac or on viper

        if params.input.pixel_to_eV.energy_per_px is not None:
            xvals = (
                np.array(range(acceptedfee1.shape[0])) -
                params.input.pixel_to_eV.x_coord_one
            ) * params.input.pixel_to_eV.energy_per_px + params.input.pixel_to_eV.y_coord_one
            xvals = xvals[::-1]

        if params.input.pixel_to_eV.x_coord_two is not None:
            eV_per_px = (params.input.pixel_to_eV.y_coord_two -
                         params.input.pixel_to_eV.y_coord_one) / (
                             params.input.pixel_to_eV.x_coord_two -
                             params.input.pixel_to_eV.x_coord_one)
            xvals = (np.array(range(acceptedfee1.shape[0])) -
                     params.input.pixel_to_eV.x_coord_one
                     ) * eV_per_px + params.input.pixel_to_eV.y_coord_one
            xvals = xvals[::-1]

        if params.input.pixel_to_eV.x_coord_two is None and params.input.pixel_to_eV.energy_per_px is None:
            xvals = np.arange(0, len(acceptedfee1), 1)

        yvals = acceptedfee1

        def OneD_plot(X, Y):
            plt.figure()
            plt.clf()
            plt.plot(X, Y)
            if params.dispatch.events_accepted:
                plt.title('Accepted Shots FEE Spectrum Run %s' %
                          params.input.run_num)
            elif params.dispatch.events_all:
                plt.title('All Shots FEE Spectrum Run %s' %
                          params.input.run_num)
            elif params.dispatch.events_rejected:
                plt.title('Rejected Shots FEE Spectrum Run %s' %
                          params.input.run_num)
            if params.input.pixel_to_eV.x_coord_one is not None:
                plt.xlabel('eV', fontsize=13)
            else:
                plt.xlabel('pixels', fontsize=13)
            plt.ylabel('pixels', fontsize=13)
            pp1.savefig()

        def TwoD_plot(data):
            plt.figure()
            ax = plt.gca()
            # To plot with a fixed intensity range (e.g. for runs 117-201),
            # pass vmin=0, vmax=50 to imshow below
            cax = ax.imshow(data,
                            interpolation='nearest',
                            origin='lower',
                            cmap=cm.coolwarm)
            plt.colorbar(cax, fraction=0.014, pad=0.04)
            if params.dispatch.events_accepted:
                ax.set_title('Accepted 2-D FEE Spectrum Run %s' %
                             params.input.run_num)
            elif params.dispatch.events_all:
                ax.set_title('All 2-D FEE Spectrum Run %s' %
                             params.input.run_num)
            elif params.dispatch.events_rejected:
                ax.set_title('Rejected 2-D FEE Spectrum Run %s' %
                             params.input.run_num)
            # Save the figure regardless of which title branch was taken
            pp2.savefig()

        OneD_plot(xvals, yvals)
        pp1.close()
        if 'acceptedfee2' in locals():
            TwoD_plot(acceptedfee2)
            pp2.close()
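
The pixel-to-eV mapping above is a plain two-point linear calibration; here is a minimal standalone sketch (the function name and reference values are illustrative, not from the original script):

import numpy as np

def pixel_axis_to_eV(n_px, px_ref1, eV_ref1, px_ref2, eV_ref2):
    """Map spectrometer pixel indices to eV from two reference points."""
    eV_per_px = (eV_ref2 - eV_ref1) / (px_ref2 - px_ref1)
    xvals = (np.arange(n_px) - px_ref1) * eV_per_px + eV_ref1
    return xvals[::-1]  # flipped, matching the orientation used above

# e.g. two known spectral features at pixels 100 and 900:
print pixel_axis_to_eV(1024, 100.0, 9480.0, 900.0, 9560.0)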
Example No. 58
0
  def run(self):
    from os.path import join, exists
    from libtbx import easy_run

    input_filename = join(self.path, "datablock.json")
    assert(exists(input_filename))

    # Generate a mask with default parameters (writes mask.pickle)
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
    ]).raise_if_errors()

    assert(exists("mask.pickle"))

    print 'OK'

    # Generate a mask with an untrusted rectangle, writing the mask and the
    # masked datablock under non-default names
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
      'output.mask=mask2.pickle',
      'output.datablock=masked_datablock.json',
      'untrusted.rectangle=100,200,100,200'
    ]).raise_if_errors().show_stdout()
    assert(exists("mask2.pickle"))
    assert(exists("masked_datablock.json"))

    # The masked datablock should reference the new mask by absolute path
    import os
    from dxtbx.serialize import load
    datablocks = load.datablock("masked_datablock.json")
    imageset = datablocks[0].extract_imagesets()[0]
    assert imageset.external_lookup.mask.filename == os.path.join(
      os.path.abspath(os.getcwd()), 'mask2.pickle')

    print 'OK'

    # Generate a mask with an untrusted circle
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
      'output.mask=mask3.pickle',
      'untrusted.circle=100,100,10'
    ]).raise_if_errors()
    assert(exists("mask3.pickle"))

    print 'OK'

    # Generate a mask from a resolution range
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
      'output.mask=mask4.pickle',
      'resolution_range=2,3',
    ]).raise_if_errors()
    assert(exists("mask4.pickle"))

    print 'OK'

    # Generate a mask from d_min/d_max limits
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
      'output.mask=mask5.pickle',
      'd_min=3',
      'd_max=2',
    ]).raise_if_errors()
    assert(exists("mask5.pickle"))

    print 'OK'

    # Generate a mask with ice-ring filtering
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
      'output.mask=mask6.pickle',
      '\'ice_rings{filter=True;d_min=2}\'',
    ]).raise_if_errors()
    assert(exists("mask6.pickle"))

    print 'OK'

    # Generate a mask with an untrusted polygon and individual untrusted pixels
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
      'output.mask=mask3.pickle',
      'untrusted.polygon=100,100,100,200,200,200,200,100',
      'untrusted.pixel=0,0',
      'untrusted.pixel=1,1'
    ]).raise_if_errors()
    assert(exists("mask3.pickle"))
    from libtbx import easy_pickle
    mask = easy_pickle.load("mask3.pickle")
    # Untrusted pixels are False in the mask; everything else is True
    assert not mask[0][0,0]
    assert not mask[0][1,1]
    assert mask[0][0,1]

    print 'OK'
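
A minimal sketch of inspecting a mask written by dials.generate_mask, assuming the usual DIALS convention that the pickle holds a tuple with one flex.bool array per detector panel (True marks trusted pixels); the file name follows the test above:

from libtbx import easy_pickle

mask = easy_pickle.load("mask2.pickle")
for panel_id, panel_mask in enumerate(mask):
    print "panel %d: %d masked pixels" % (panel_id, panel_mask.count(False))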
Example No. 59
0
# Assumed module-level preamble (not shown in the original snippet): the test
# relies on these imports and on the standard dials_regression lookup.
import os
import libtbx.load_env
from libtbx import easy_run, easy_pickle
from libtbx.test_utils import approx_equal, open_tmp_directory
from dxtbx.serialize import load

have_dials_regression = libtbx.env.has_module("dials_regression")
if have_dials_regression:
    dials_regression = libtbx.env.find_in_repositories(
        relative_path="dials_regression", test=os.path.isdir)


def run():
    if not have_dials_regression:
        print "Skipping tst_reindex: dials_regression not available."
        return

    data_dir = os.path.join(dials_regression, "indexing_test_data",
                            "i04_weak_data")
    pickle_path = os.path.join(data_dir, "indexed.pickle")
    experiments_path = os.path.join(data_dir, "experiments.json")
    commands = [
        "dials.reindex", pickle_path, experiments_path,
        "change_of_basis_op=2a,b,c", "space_group=P1"
    ]
    command = " ".join(commands)
    print command
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory()
    os.chdir(tmp_dir)
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    old_reflections = easy_pickle.load(pickle_path)
    assert os.path.exists('reindexed_reflections.pickle')
    new_reflections = easy_pickle.load('reindexed_reflections.pickle')
    old_experiments = load.experiment_list(experiments_path,
                                           check_format=False)
    assert os.path.exists('reindexed_experiments.json')
    new_experiments = load.experiment_list('reindexed_experiments.json',
                                           check_format=False)
    h1, k1, l1 = old_reflections['miller_index'].as_vec3_double().parts()
    h2, k2, l2 = new_reflections['miller_index'].as_vec3_double().parts()
    assert approx_equal(2 * h1, h2)
    assert approx_equal(k1, k2)
    assert approx_equal(l1, l2)
    old_uc_params = old_experiments[0].crystal.get_unit_cell().parameters()
    new_uc_params = new_experiments[0].crystal.get_unit_cell().parameters()
    assert approx_equal(2 * old_uc_params[0], new_uc_params[0])
    assert approx_equal(old_uc_params[1:], new_uc_params[1:])
    assert old_experiments[0].crystal.get_space_group().type().hall_symbol(
    ) == ' P 1'
    assert new_experiments[0].crystal.get_space_group().type().hall_symbol(
    ) == ' P 1'

    # Assign space group P4 without changing the basis
    from cctbx import sgtbx
    cb_op = sgtbx.change_of_basis_op('a,b,c')
    commands = [
        "dials.reindex", experiments_path, "space_group=P4",
        "change_of_basis_op=%s" % str(cb_op), "output.experiments=P4.json"
    ]
    command = " ".join(commands)
    print command
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    # apply one of the symops from the space group
    cb_op = sgtbx.change_of_basis_op('-x,-y,z')
    commands = [
        "dials.reindex", "P4.json",
        "change_of_basis_op=%s" % str(cb_op),
        "output.experiments=P4_reindexed.json"
    ]
    command = " ".join(commands)
    print command
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    new_experiments1 = load.experiment_list('P4_reindexed.json',
                                            check_format=False)
    assert approx_equal(new_experiments1[0].crystal.get_A(),
                        old_experiments[0].crystal.change_basis(cb_op).get_A())
    # Recover the same operation automatically by comparing against the
    # reference experiments
    commands = [
        "dials.reindex", "P4.json", "change_of_basis_op=auto",
        "reference=P4_reindexed.json", "output.experiments=P4_reindexed2.json"
    ]
    command = " ".join(commands)
    print command
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    new_experiments2 = load.experiment_list('P4_reindexed2.json',
                                            check_format=False)
    assert approx_equal(new_experiments1[0].crystal.get_A(),
                        new_experiments2[0].crystal.get_A())
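
For reference, a small sketch of what a change-of-basis operator does on its own, using only cctbx (the cell values are chosen for illustration; with '2a,b,c' the a edge doubles, mirroring the unit-cell assertion in the test above):

from cctbx import sgtbx, uctbx

cb_op = sgtbx.change_of_basis_op('2a,b,c')
print cb_op.as_abc()            # operator in basis-vector notation
print cb_op.inverse().as_abc()  # its inverse
uc = uctbx.unit_cell((10, 20, 30, 90, 90, 90))
print uc.change_basis(cb_op).parameters()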
Example No. 60
0
# Assumed imports for this snippet: np, mpl and ep are used below but were
# imported elsewhere in the original module.
import numpy as np
import matplotlib as mpl
from libtbx import easy_pickle as ep


def cv_png(image_pickle,
           integration_pickle,
           file_name=None,
           res=600,
           show_spots=True):
    """ Write a png file visualizing the correction vectors.
  :param image_pickle: path to image pickle file.
  :param integration_pickle: path to integration pickle file.
  :param res: resolution of output file in dpi (6x6 image size).
  :file_name: desired output file name. Defaults to the integration_pickle name.
  """

    cmap = mpl.cm.jet

    if file_name is None:
        import os
        # Default: change the extension of `integration_pickle` to .png
        file_name = os.path.splitext(integration_pickle)[0] + ".png"

    # Load image pickle, and convert to image
    img_dict = ep.load(image_pickle)
    img_data = img_dict['DATA'].as_numpy_array()

    # Load integration pickle, and get coordinates of predictions
    int_d = ep.load(integration_pickle)
    spot_coords = [x['obsspot'] for x in int_d['correction_vectors'][0]]
    spot_coords = np.array(spot_coords).transpose()
    pred_coords = [x['predspot'] for x in int_d['correction_vectors'][0]]
    pred_coords = np.array(pred_coords).transpose()
    corr_vecs = pred_coords - spot_coords
    # Euclidean distance between each observed spot and its prediction
    corr_factors = np.sqrt(corr_vecs[0]**2 + corr_vecs[1]**2)

    # Get some other useful info from integration pickle
    point_group = int_d['pointgroup']
    unit_cell = int_d['current_orientation'][0].unit_cell().parameters()
    unit_cell = ', '.join("{:.1f}".format(u) for u in unit_cell)
    mosaicity = int_d['mosaicity']

    # Create the figure
    import matplotlib.pyplot as plt
    fig = plt.figure(figsize=(6, 6))

    # 1st set of axes for the image
    ax = fig.add_subplot(1, 1, 1)
    plt.subplots_adjust(right=0.8)
    ax.set_xlim(0, img_data.shape[1])
    ax.set_ylim(0, img_data.shape[0])
    ax.set_aspect('equal')

    # To display the image properly, clip the intensity scale at the 99.99th
    # percentile; a few saturated pixels would otherwise make the entire
    # image appear blank
    clim = (img_data.min(), np.percentile(img_data, 99.99))
    ax.imshow(img_data, origin=None, cmap='Greys', clim=clim)

    # 2nd set of axes for the predictions
    ax2 = fig.add_axes(ax.get_position(), frameon=False)  # superimposed axes
    ax2.set_xlim(0, img_data.shape[1])
    ax2.set_ylim(0, img_data.shape[0])
    ax2.set_aspect('equal')

    norm = mpl.colors.Normalize(vmin=0, vmax=round(max(corr_factors)))
    if show_spots:
        # Observed spots, colored by their distance from the prediction
        ax2.scatter(spot_coords[1],
                    spot_coords[0],
                    c=norm(corr_factors),
                    cmap=cmap,
                    alpha=0.5,
                    s=5,
                    linewidths=0)

    cax = plt.axes([0.85, 0.15, 0.05, 0.69])
    cbar = mpl.colorbar.ColorbarBase(cax, cmap=cmap, norm=norm)
    cbar.set_label('Spot-prediction distance [px]')

    ax.set_title("Prediction-offset values\nfor spotfinder results")
    plt.savefig(file_name, dpi=res, format='png')
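
A hypothetical invocation (both file names are made up for illustration; in practice they are pickles produced by the cctbx.xfel integration pipeline):

cv_png("image_shot_001.pickle", "int_shot_001.pickle", res=300)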