Example #1
def exercise_2 () :
  hkl_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/wizards/p9_se_w2.sca",
    test=os.path.isfile)
  if (hkl_file is None) :
    warnings.warn("phenix_regression not available, skipping test")
    return
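  # read the unmerged scalepack intensities, imposing the expected I4
  # symmetry explicitly, then merge symmetry-equivalent observations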
  hkl_in = file_reader.any_file(hkl_file).assert_file_type("hkl")
  i_obs_raw = hkl_in.file_object.as_miller_arrays(
    merge_equivalents=False,
    crystal_symmetry=crystal.symmetry(
      space_group_symbol="I4",
      unit_cell=(113.949,113.949,32.474,90,90,90)))[0]
  i_obs = i_obs_raw.merge_equivalents().array()
  # completeness and data strength
  cstats = ds.i_sigi_completeness_stats(i_obs)
  d_min_cut = cstats.resolution_cut
  assert approx_equal(d_min_cut, 2.150815)
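  # straight Wilson scaling of the merged intensities (the result is not
  # inspected further here; the call itself is what is being exercised)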
  ws = ds.wilson_scaling(
    miller_array=i_obs,
    n_residues=120)
  # outliers - this shouldn't actually work, since it requires additional
  # processing steps on the input data
  try :
    outliers = ds.possible_outliers(i_obs)
  except AssertionError :
    pass
  else :
    raise Exception_expected
  ######################################################################
  # OVERALL ANALYSIS
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_examples/p9-build/p9.pdb",
    test=os.path.isfile)
  f_calc = None
  if (pdb_file is not None) :
    pdb_in = file_reader.any_file(pdb_file).assert_file_type("pdb")
    hierarchy = pdb_in.file_object.hierarchy
    xrs = pdb_in.file_object.xray_structure_simple(
      crystal_symmetry=i_obs)
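    # compute model amplitudes to the resolution limit of the data, add
    # Friedel mates, and restrict both arrays to their common reflections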
    f_calc = xrs.structure_factors(d_min=i_obs.d_min()).f_calc()
    f_calc = abs(f_calc).generate_bijvoet_mates()
    f_calc = f_calc.set_observation_type_xray_amplitude()
    i_obs, f_calc = i_obs.common_sets(other=f_calc)
    open("tmp_xtriage.pdb", "w").write(hierarchy.as_pdb_string(
      crystal_symmetry=i_obs))
    pdb_file = "tmp_xtriage.pdb"
  params = xtriage.master_params.extract()
  params.scaling.input.asu_contents.n_residues = 141
  result = xtriage.xtriage_analyses(
    miller_obs=i_obs,
    miller_calc=f_calc,
    params=params,
    unmerged_obs=i_obs_raw,
    text_out=open("logfile3.log", "w"))#sys.stdout)
  # XXX there appears to be some system-dependence here, hence sloppy limits
  assert (15.5 < result.aniso_b_min < 15.9)
  assert (10 < result.aniso_range_of_b < 11)
  # check relative Wilson
  if (pdb_file is not None) :
    assert (result.relative_wilson is not None)
    # FIXME
    #assert (result.relative_wilson.n_outliers() == 34)
  #show_pickled_object_sizes(result)
  test_pickle_consistency_and_size(result)
  # XXX PDB validation server
  assert approx_equal(result.iso_b_wilson, 18.33, eps=0.1)
  assert approx_equal(result.aniso_b_ratio, 0.546, eps=0.1)
  assert (result.number_of_wilson_outliers == 0)
  assert approx_equal(result.l_test_mean_l, 0.493, eps=0.1)
  assert approx_equal(result.l_test_mean_l_squared, 0.326, eps=0.1)
  assert approx_equal(result.i_over_sigma_outer_shell, 3.25, eps=0.1)
  assert ("No significant pseudotranslation is detected" in
          result.patterson_verdict)
  # test consistency of output after pickling and unpickling
  try :
    from phenix_dev.phenix_cloud import xtriage_json
  except ImportError :
    pass
  else :
    json_out = xtriage_json.json_output("p9.sca")
    result.show(out=json_out)
    open("xtriage.json", "w").write(json_out.export())
  # unmerged data
  assert result.merging_stats is not None
  out = StringIO()
  result.merging_stats.show(out=out)
  assert ("R-merge: 0.073" in out.getvalue())
  assert approx_equal(result.estimate_d_min(min_i_over_sigma=10), 1.9645,
    eps=0.001)
  # FIXME PDB doesn't actually have unit cell!
  # test detection of symmetry in reference file
  if (pdb_file is not None) :
    args = [hkl_file, pdb_file]
    result = xtriage.run(args=args, out=null_out())
Example #2
def exercise_2():
    hkl_file = libtbx.env.find_in_repositories(
        relative_path="phenix_regression/wizards/data/p9_se_w2.sca",
        test=os.path.isfile)
    if (hkl_file is None):
        warnings.warn("phenix_regression not available, skipping test")
        return
    hkl_in = file_reader.any_file(hkl_file).assert_file_type("hkl")
    i_obs_raw = hkl_in.file_object.as_miller_arrays(
        merge_equivalents=False,
        crystal_symmetry=crystal.symmetry(space_group_symbol="I4",
                                          unit_cell=(113.949, 113.949, 32.474,
                                                     90, 90, 90)))[0]
    i_obs = i_obs_raw.merge_equivalents().array()
    # completeness and data strength
    cstats = ds.i_sigi_completeness_stats(i_obs)
    d_min_cut = cstats.resolution_cut
    assert approx_equal(d_min_cut, 2.150815)
    ws = ds.wilson_scaling(miller_array=i_obs, n_residues=120)
    # outliers - this shouldn't actually work, since it requires additional
    # processing steps on the input data
    try:
        outliers = ds.possible_outliers(i_obs)
    except AssertionError:
        pass
    else:
        raise Exception_expected
    ######################################################################
    # OVERALL ANALYSIS
    pdb_file = libtbx.env.find_in_repositories(
        relative_path="phenix_examples/p9-build/p9.pdb", test=os.path.isfile)
    f_calc = None
    if (pdb_file is not None):
        pdb_in = file_reader.any_file(pdb_file).assert_file_type("pdb")
        hierarchy = pdb_in.file_object.hierarchy
        xrs = pdb_in.file_object.xray_structure_simple(crystal_symmetry=i_obs)
        f_calc = xrs.structure_factors(d_min=i_obs.d_min()).f_calc()
        f_calc = abs(f_calc).generate_bijvoet_mates()
        f_calc = f_calc.set_observation_type_xray_amplitude()
        i_obs, f_calc = i_obs.common_sets(other=f_calc)
        open("tmp_xtriage.pdb",
             "w").write(hierarchy.as_pdb_string(crystal_symmetry=i_obs))
        pdb_file = "tmp_xtriage.pdb"
    params = xtriage.master_params.extract()
    params.scaling.input.asu_contents.n_residues = 141
    result = xtriage.xtriage_analyses(miller_obs=i_obs,
                                      miller_calc=f_calc,
                                      params=params,
                                      unmerged_obs=i_obs_raw,
                                      text_out=open("logfile3.log",
                                                    "w"))  #sys.stdout)
    # XXX there appears to be some system-dependence here, hence sloppy limits
    assert (15.5 < result.aniso_b_min < 15.9)
    assert (10 < result.aniso_range_of_b < 11)
    # check relative Wilson
    if (pdb_file is not None):
        assert (result.relative_wilson is not None)
        # FIXME
        #assert (result.relative_wilson.n_outliers() == 34)
    #show_pickled_object_sizes(result)
    test_pickle_consistency_and_size(result)
    # XXX PDB validation server
    assert approx_equal(result.iso_b_wilson, 18.33, eps=0.1)
    assert approx_equal(result.aniso_b_ratio, 0.546, eps=0.1)
    assert (result.number_of_wilson_outliers == 0)
    assert approx_equal(result.l_test_mean_l, 0.493, eps=0.1)
    assert approx_equal(result.l_test_mean_l_squared, 0.326, eps=0.1)
    assert approx_equal(result.i_over_sigma_outer_shell, 3.25, eps=0.1)
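    # overall <I/sigma> plus the anomalous-signal statistics used for
    # planning a SAD experiment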
    assert approx_equal(result.overall_i_sig_i, 10.34, eps=0.1)
    assert approx_equal(
        result.anomalous_info.plan_sad_experiment_stats.get_overall(
            item="i_over_sigma_dict"),
        10.61,
        eps=0.1)
    assert approx_equal(
        result.anomalous_info.plan_sad_experiment_stats.get_overall(
            item="anom_signal_dict"),
        15.35,
        eps=0.1)
    assert ("No significant pseudotranslation is detected"
            in result.patterson_verdict)
    # test consistency of output after pickling and unpickling
    try:
        from phenix_dev.phenix_cloud import xtriage_json
    except ImportError:
        pass
    else:
        json_out = xtriage_json.json_output("p9.sca")
        result.show(out=json_out)
        open("xtriage.json", "w").write(json_out.export())
    # unmerged data
    assert result.merging_stats is not None
    out = StringIO()
    result.merging_stats.show(out=out)
    assert ("R-merge: 0.073" in out.getvalue())
    assert approx_equal(result.estimate_d_min(min_i_over_sigma=10),
                        1.9645,
                        eps=0.001)
    # FIXME PDB doesn't actually have unit cell!
    # test detection of symmetry in reference file
    if (pdb_file is not None):
        args = [hkl_file, pdb_file]
        result = xtriage.run(args=args, out=null_out())
Example #3
  def __init__(self,
               miller_array,
               pre_scaling_protocol,
               basic_info,
               out=None):
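    """
    Prepare the input data for scaling: estimate unit-cell contents via a
    Matthews analysis, report any user-specified resolution limits, reject
    Wilson-statistics outliers and, if requested, apply an anisotropy
    correction.
    """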
    ## Make deep copy of the miller array of interest
    self.x1 = miller_array.deep_copy()
    self.options=pre_scaling_protocol
    self.basic_info= basic_info

    ## Determine unit_cell contents
    print(file=out)
    print("Matthews analyses", file=out)
    print("-----------------", file=out)
    print(file=out)
    print("Inspired by: Kantardjieff and Rupp. Prot. Sci. 12(9): 1865-1871 (2003).", file=out)
    matthews_analyses = matthews.matthews_rupp(
      crystal_symmetry = self.x1,
      n_residues = self.basic_info.n_residues,
      n_bases = self.basic_info.n_bases,
      out=out, verbose=1)
    n_residues=matthews_analyses[0]
    n_bases=matthews_analyses[1]
    n_copies_solc=matthews_analyses[2]

    if (self.basic_info.n_residues is None):
      self.basic_info.n_residues = n_residues
    if (self.basic_info.n_bases is None):
      self.basic_info.n_bases = n_bases


    ## apply resolution cut
    print(file=out)
    print("Applying resolution cut", file=out)
    print("-----------------------", file=out)

    if self.options.low_resolution is None:
      if self.options.high_resolution is None:
        print("No resolution cut is made", file=out)

    low_cut=float(1e6)
    if self.options.low_resolution is not None:
      low_cut = self.options.low_resolution
      print("Specified low resolution limit: %3.2f"%(
       float(self.options.low_resolution) ), file=out)

    high_cut = 0
    if self.options.high_resolution is not None:
      high_cut = self.options.high_resolution
      print("Specified high resolution limit: %3.2f"%(
       float(self.options.high_resolution) ), file=out)
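    # low_cut/high_cut hold the requested d-spacing limits (defaults 1e6 A
    # and 0 A, i.e. effectively no cut)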

    ## perform outlier analyses
    ##
    ## Do a simple outlier analyses please
    print(file=out)
    print("Wilson statistics based outlier analyses", file=out)
    print("----------------------------------------", file=out)
    print(file=out)
    native_outlier = data_statistics.possible_outliers(
      miller_array = self.x1,
      prob_cut_ex = self.options.outlier_level_extreme,
      prob_cut_wil = self.options.outlier_level_wilson )
    native_outlier.show(out=out)

    self.x1 = native_outlier.remove_outliers(
      self.x1 )

    ## apply anisotropic scaling  (final B-value will be set to b_add)!
    if self.options.aniso_correction:

      b_final = self.options.b_add
      if b_final is None:
        b_final = 0.0

      print(file=out)
      print("Anisotropic absolute scaling of data", file=out)
      print("--------------------------------------", file=out)
      print(file=out)

      # the per-copy residue/base counts are scaled up to the full unit cell:
      # Z symmetry copies times the estimated number of copies in the ASU
      aniso_correct = absolute_scaling.ml_aniso_absolute_scaling(
        miller_array = self.x1,
        n_residues = n_residues*self.x1.space_group().order_z()*n_copies_solc,
        n_bases = n_bases*self.x1.space_group().order_z()*n_copies_solc)
      aniso_correct.show(out=out,verbose=1)
      print(file=out)
      print("  removing anisotropy for native  ", file=out)
      print(file=out)
      u_star_correct_nat = aniso_correct.u_star
      self.x1 = absolute_scaling.anisotropic_correction(
        self.x1,
        aniso_correct.p_scale,
        u_star_correct_nat  )
Example #4
  def __init__(self,
               miller_array,
               pre_scaling_protocol,
               basic_info,
               out=None):
    ## Make deep copy of the miller array of interest
    self.x1 = miller_array.deep_copy()
    self.options=pre_scaling_protocol
    self.basic_info= basic_info

    ## Determine unit_cell contents
    print(file=out)
    print("Matthews analyses", file=out)
    print("-----------------", file=out)
    print(file=out)
    print("Inspired by: Kantardjieff and Rupp. Prot. Sci. 12(9): 1865-1871 (2003).", file=out)
    matthews_analyses = matthews.matthews_rupp(
      crystal_symmetry = self.x1,
      n_residues = self.basic_info.n_residues,
      n_bases = self.basic_info.n_bases,
      out=out, verbose=1)
    n_residues=matthews_analyses[0]
    n_bases=matthews_analyses[1]
    n_copies_solc=matthews_analyses[2]

    if (self.basic_info.n_residues is None):
      self.basic_info.n_residues = n_residues
    if (self.basic_info.n_bases is None):
      self.basic_info.n_bases = n_bases


    ## apply resolution cut
    print(file=out)
    print("Applying resolution cut", file=out)
    print("-----------------------", file=out)

    if self.options.low_resolution is None:
      if self.options.high_resolution is None:
        print >> out, "No resolution cut is made"

    low_cut=float(1e6)
    if self.options.low_resolution is not None:
      low_cut = self.options.low_resolution
      print >> out, "Specified low resolution limit: %3.2f"%(
       float(self.options.low_resolution) )

    high_cut = 0
    if self.options.high_resolution is not None:
      high_cut = self.options.high_resolution
      print >> out, "Specified high resolution limit: %3.2f"%(
       float(self.options.high_resolution) )

    ## perform outlier analyses
    ##
    ## Do a simple outlier analyses please
    print(file=out)
    print("Wilson statistics based outlier analyses", file=out)
    print("----------------------------------------", file=out)
    print(file=out)
    native_outlier = data_statistics.possible_outliers(
      miller_array = self.x1,
      prob_cut_ex = self.options.outlier_level_extreme,
      prob_cut_wil = self.options.outlier_level_wilson )
    native_outlier.show(out=out)

    self.x1 = native_outlier.remove_outliers(
      self.x1 )

    ## apply anisotropic scaling  (final B-value will be set to b_add)!
    if self.options.aniso_correction:

      b_final = self.options.b_add
      if b_final is None:
        b_final = 0.0

      print(file=out)
      print("Anisotropic absolute scaling of data", file=out)
      print("--------------------------------------", file=out)
      print(file=out)

      aniso_correct = absolute_scaling.ml_aniso_absolute_scaling(
        miller_array = self.x1,
        n_residues = n_residues*self.x1.space_group().order_z()*n_copies_solc,
        n_bases = n_bases*self.x1.space_group().order_z()*n_copies_solc)
      aniso_correct.show(out=out,verbose=1)
      print(file=out)
      print("  removing anisotropy for native  ", file=out)
      print(file=out)
      u_star_correct_nat = aniso_correct.u_star
      self.x1 = absolute_scaling.anisotropic_correction(
        self.x1,
        aniso_correct.p_scale,
        u_star_correct_nat  )