Example #1
def exercise_end_of_line(exe_name, verbose):
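  # Write small test files with unix, dos and mac line endings, pipe each one
  # through the given executable, and compare against the expected output.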
  lines = """\
a
bc
def
ghij
klmno
""".splitlines()
  open("unix.txt", "wb").write("\n".join(lines)+"\n")
  open("dos.txt", "wb").write("\r\n".join(lines)+"\r\n")
  open("dos2.txt", "wb").write("\r\r\n".join(lines)+"\r\n")
  open("mac.txt", "wb").write("\r".join(lines)+"\r")
  from libtbx import easy_run
  from libtbx.utils import remove_files
  import os
  op = os.path
  expected_outputs = [
    "a   \nbc  \ndef \nghij\nklmn\n",
    "a   \nbc  \ndef \nghij\nklmn\n",
    "a\r  \nbc\r \ndef\r\nghij\nklmn\n",
    "a\rbc\n"]
  for vers,expected in zip(["unix", "dos", "dos2", "mac"], expected_outputs):
    remove_files(paths=["read_lines_out"])
    cmd = "%s < %s.txt > read_lines_out" % (op.join(".", exe_name), vers)
    if (verbose): print cmd
    easy_run.fully_buffered(command=cmd).raise_if_errors_or_output()
    assert op.isfile("read_lines_out")
    result = open("read_lines_out", "rb").read()
    assert result == expected.replace("\n", os.linesep)
Example #2
def exercise(space_group_info, anomalous_flag=False, d_min=2., verbose=0):
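  # Generate random structure factors and phase coefficients, run CNS on the
  # generated input files, and verify the CNS output against the cctbx values.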
  sg_fcalc = random_structure.xray_structure(
    space_group_info,
    elements=("N", "C", "C", "O"),
    random_f_double_prime=anomalous_flag,
    random_u_iso=True,
    random_occupancy=True
    ).structure_factors(
      anomalous_flag=anomalous_flag, d_min=d_min, algorithm="direct").f_calc()
  sg_hl = generate_random_hl(sg_fcalc)
  write_cns_input(sg_fcalc, sg_hl.data())
  try: os.unlink("tmp_sg.hkl")
  except OSError: pass
  try: os.unlink("tmp_p1.hkl")
  except OSError: pass
  easy_run.fully_buffered(command="cns < tmp.cns > tmp.out") \
    .raise_if_errors_or_output()
  sg_cns = read_reflection_arrays("tmp_sg.hkl", anomalous_flag, verbose)
  p1_cns = read_reflection_arrays("tmp_p1.hkl", anomalous_flag, verbose)
  verify(sg_fcalc, sg_hl.data(), sg_cns, p1_cns)
  if (anomalous_flag):
    hl_merged = sg_hl.average_bijvoet_mates()
    fc_merged = sg_fcalc.average_bijvoet_mates()
    write_cns_input(sg_fcalc, sg_hl.data(), test_merge=True)
    try: os.unlink("tmp_merged.hkl")
    except OSError: pass
    easy_run.fully_buffered(command="cns < tmp.cns > tmp.out") \
      .raise_if_errors_or_output()
    reflection_file = reflection_reader.cns_reflection_file(
      open("tmp_merged.hkl"))
    if (not sg_fcalc.space_group().is_centric()):
      fc_merged_cns = reflection_file.reciprocal_space_objects["FCALC"]
      fc_merged_cns = fc_merged.customized_copy(
        indices=fc_merged_cns.indices,
        data=fc_merged_cns.data).map_to_asu().common_set(fc_merged)
      assert fc_merged_cns.indices().all_eq(fc_merged.indices())
      fc_merged_a = fc_merged.select_acentric()
      fc_merged_cns_a = fc_merged_cns.select_acentric()
      for part in [flex.real, flex.imag]:
        cc = flex.linear_correlation(
          part(fc_merged_a.data()),
          part(fc_merged_cns_a.data())).coefficient()
        if (cc < 1-1.e-6):
          print "FAILURE acentrics", sg_fcalc.space_group_info()
          if (0): return
          raise AssertionError
    names, miller_indices, hl = reflection_file.join_hl_group()
    assert names == ["PA", "PB", "PC", "PD"]
    hl_merged_cns = hl_merged.customized_copy(indices=miller_indices, data=hl)\
      .map_to_asu().common_set(hl_merged)
    assert hl_merged_cns.indices().all_eq(hl_merged.indices())
    for h,a,b in zip(hl_merged.indices(),
                     hl_merged.data(),
                     hl_merged_cns.data()):
      if (not approx_equal(a, b, eps=5.e-3)):
        print h
        print "cctbx:", a
        print "  cns:", b
        if (0): return
        raise AssertionError
Example #3
    def run(self):
        from libtbx import easy_run

        # Call dials.plot_scan_varying_crystal
        easy_run.fully_buffered(["dials.plot_scan_varying_crystal", self.path]).raise_if_errors()

        print "OK"
Example #4
  def run(self):

    from os.path import join, exists
    from libtbx import easy_run
    import os

    input_filename = join(self.path, "datablock.json")
    output_filename = "output_datablock.json"
    mask_filename = join(self.path, "lookup_mask.pickle")

    easy_run.fully_buffered(
      ['dials.apply_mask',
       'input.datablock=%s' % input_filename,
       'input.mask=%s' % mask_filename,
       'output.datablock=%s' % output_filename]).raise_if_errors()

    from dxtbx.datablock import DataBlockFactory
    datablocks = DataBlockFactory.from_json_file(output_filename)

    assert len(datablocks) == 1
    imagesets = datablocks[0].extract_imagesets()
    assert len(imagesets) == 1
    imageset = imagesets[0]
    assert imageset.external_lookup.mask.filename == mask_filename

    print 'OK'
Example #5
  def run(self):

    from glob import glob
    from os.path import join, exists
    from libtbx import easy_run

    template = glob(join(self.directory, "centroid*.cbf"))
    args = [
      "dials.find_spots", ' '.join(template),
      "output.datablock=datablock.json",
      "output.reflections=spotfinder.pickle",
      "output.shoeboxes=True"
    ]
    result = easy_run.fully_buffered(command=" ".join(args)).raise_if_errors()

    assert exists("datablock.json")
    assert exists("spotfinder.pickle")

    args = [
      "dials.find_hot_pixels",
      "input.datablock=datablock.json",
      "input.reflections=spotfinder.pickle",
      "output.mask=hot_mask.pickle"
    ]
    result = easy_run.fully_buffered(command=" ".join(args)).raise_if_errors()

    assert exists("hot_mask.pickle")

    print 'OK'
Example #6
  def run(self):
    from libtbx import easy_run
    from dials.array_family import flex

    # Call dials.merge_reflection_lists
    easy_run.fully_buffered([
      'dev.dials.merge_reflection_lists',
      'temp1.pickle',
      'temp2.pickle',
      'method=update'
    ]).raise_if_errors()

    table = flex.reflection_table.from_pickle('merged.pickle')
    assert(len(table) == 360)
    print 'OK'

    # Call dials.merge_reflection_lists
    easy_run.fully_buffered([
      'dev.dials.merge_reflection_lists',
      'temp1.pickle',
      'temp2.pickle',
      'method=extend'
    ]).raise_if_errors()

    table = flex.reflection_table.from_pickle('merged.pickle')
    assert(len(table) == 720)
    print 'OK'
Example #7
  def tst_scan_varying_prediction(self):
    from os.path import join
    from libtbx import easy_run

    # Call dials.predict
    easy_run.fully_buffered([
      'dials.predict',
      join(self.path, 'experiments_scan_varying_crystal.json'),
    ]).raise_if_errors()

    import cPickle as pickle
    table = pickle.load(open('predicted.pickle', 'rb'))
    assert(len(table) == 1934)
    print 'OK'

    # Check the reflection IDs
    assert('id' in table)
    assert('miller_index' in table)
    assert('s1' in table)
    assert('xyzcal.px' in table)
    assert('xyzcal.mm' in table)
    for row in table:
      assert(row['id'] == 0)

    print 'OK'
Example #8
  def test4(self):
    from os.path import join
    from libtbx import easy_run
    import os

    dirname = 'test4'
    os.mkdir(dirname)
    os.chdir(dirname)

    # Call dials.integrate
    easy_run.fully_buffered([
      'dials.integrate',
      join(self.path, 'experiments.json'),
      'profile.fitting=False',
      'sampling.integrate_all_reflections=False',
    ]).raise_if_errors()

    import cPickle as pickle
    table = pickle.load(open('integrated.pickle', 'rb'))
    assert len(table) == 1000

    # Call dials.integrate
    easy_run.fully_buffered([
      'dials.integrate',
      join(self.path, 'experiments.json'),
      'profile.fitting=False',
      'sampling.integrate_all_reflections=False',
      'sampling.minimum_sample_size=500',
    ]).raise_if_errors()

    import cPickle as pickle
    table = pickle.load(open('integrated.pickle', 'rb'))
    assert len(table) == 500

    print 'OK'
Example #9
  def tst_import_integrate_hkl(self):
    from dials.array_family import flex # import dependency
    from os.path import join
    from libtbx import easy_run

    # Call dials.import_xds
    easy_run.fully_buffered([
      'dials.import_xds',
      'input.method=reflections',
      join(self.path, 'INTEGRATE.HKL'),
      join(self.path, "experiments.json")
    ]).raise_if_errors()

    import cPickle as pickle
    table = pickle.load(open('integrate_hkl.pickle', 'rb'))

    assert('miller_index' in table)
    assert('id' in table)
    assert('panel' in table)
    assert('xyzcal.px' in table)
    assert('xyzobs.px.value' in table)
    assert('intensity.cor.value' in table)
    assert('intensity.cor.variance' in table)
    assert(len(table) == 174911)
    print 'OK'
Example #10
def exercise_export_bitmaps():
    import os
    from libtbx import easy_run

    if dials_regression is None:
        print "Skipping exercise_export_bitmaps(): dials_regression not available"
        return

    data_dir = os.path.join(dials_regression, "centroid_test_data")

    cmd = "dials.export_bitmaps %s/centroid_0001.cbf" % data_dir
    print cmd
    result = easy_run.fully_buffered(cmd).raise_if_errors()

    assert os.path.exists("image0001.png")

    cmd = " ".join(
        [
            "dials.export_bitmaps",
            "%s/datablock.json" % data_dir,
            "prefix=variance_",
            "binning=2",
            "display=variance",
            "colour_scheme=inverse_greyscale",
            "brightness=25",
            "kernel_size=5,5",
        ]
    )
    print cmd
    result = easy_run.fully_buffered(cmd).raise_if_errors()

    for i in range(1, 8):
        assert os.path.exists("variance_000%i.png" % i)
Example #11
  def run(self):
    from os.path import join
    from libtbx import easy_run
    from dials.algorithms.profile_model.factory import phil_scope
    from libtbx.phil import parse
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory

    # Call dials.create_profile_model
    easy_run.fully_buffered([
      'dials.create_profile_model',
      join(self.path, 'experiments.json'),
      join(self.path, 'indexed.pickle'),
    ]).raise_if_errors()


    experiments = ExperimentListFactory.from_json_file(
      "experiments_with_profile_model.json",
      check_format=False)
    sigma_b = experiments[0].profile.sigma_b(deg=True)
    sigma_m = experiments[0].profile.sigma_m(deg=True)
    eps = 1e-3
    try:
      assert(abs(sigma_b - 0.02195) < eps)
      assert(abs(sigma_m - 0.06833) < eps)
    except Exception:
      print sigma_b
      print sigma_m
      raise
    print 'OK'
Example #12
def build_run(a_out, n_scatt, n_refl, build_cmd, check_max_a_b):
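  # Build (or write out) a.out, run it under /usr/bin/time, sanity-check the
  # printed amplitudes, and return the user CPU time from the timing output.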
  if (op.isfile("a.out")):
    os.remove("a.out")
  assert not op.isfile("a.out")
  if (a_out is None):
    easy_run.fully_buffered(command=build_cmd).raise_if_errors_or_output()
  else:
    open("a.out", "w").write(a_out)
    os.chmod("a.out", 0755)
  assert op.isfile("a.out")
  run_cmd = "/usr/bin/time  -p ./a.out"
  buffers = easy_run.fully_buffered(command=run_cmd)
  assert len(buffers.stderr_lines) == 3
  if (n_scatt <= 10 and n_refl <= 100):
    assert len(buffers.stdout_lines) == n_scatt + n_refl
  else:
    assert len(buffers.stdout_lines) == 1
    max_a, max_b = [float(s) for s in buffers.stdout_lines[0].split()]
  if (check_max_a_b):
    if (n_scatt == 2000 and n_refl == 20000):
      assert approx_equal(max_a, 35.047157, eps=1e-4)
      assert approx_equal(max_b, 25.212738, eps=1e-4)
    elif (n_scatt == 100 and n_refl == 1000):
      assert approx_equal(max_a,  4.493645, eps=1e-4)
      assert approx_equal(max_b, 10.515532, eps=1e-4)
    elif (n_scatt <= 10 and n_refl <= 100):
      compare_with_cctbx_structure_factors(
        n_scatt=n_scatt,
        n_refl=n_refl,
        output_lines=buffers.stdout_lines)
    else:
      raise RuntimeError, (max_a, max_b)
  utime = float(buffers.stderr_lines[1].split()[1])
  return utime
Example #13
def exercise():
  import os
  from libtbx import easy_run
  open("tmp_fmodel_fake_p1.pdb", "w").write("""\
ATOM     47  N   TYR A   7       8.292   1.817   6.147  1.00 14.70           N
ATOM     48  CA  TYR A   7       9.159   2.144   7.299  1.00 15.18           C
ATOM     49  C   TYR A   7      10.603   2.331   6.885  1.00 15.91           C
ATOM     50  O   TYR A   7      11.041   1.811   5.855  1.00 15.76           O
ATOM     51  CB  TYR A   7       9.061   1.065   8.369  1.00 15.35           C
ATOM     52  CG  TYR A   7       7.665   0.929   8.902  1.00 14.45           C
ATOM     53  CD1 TYR A   7       6.771   0.021   8.327  1.00 15.68           C
ATOM     54  CD2 TYR A   7       7.210   1.756   9.920  1.00 14.80           C
ATOM     55  CE1 TYR A   7       5.480  -0.094   8.796  1.00 13.46           C
ATOM     56  CE2 TYR A   7       5.904   1.649  10.416  1.00 14.33           C
ATOM     57  CZ  TYR A   7       5.047   0.729   9.831  1.00 15.09           C
ATOM     58  OH  TYR A   7       3.766   0.589  10.291  1.00 14.39           O
ATOM     59  OXT TYR A   7      11.358   2.999   7.612  1.00 17.49           O
""")
  args = ["phenix.fmodel", "tmp_fmodel_fake_p1.pdb", "high_resolution=2",
    "output.file_name=tmp_fmodel_fake_p1.mtz"]
  result = easy_run.fully_buffered(args)
  assert (result.return_code != 0) and (len(result.stderr_lines) > 0)
  args.append("generate_fake_p1_symmetry=True")
  result = easy_run.fully_buffered(args).raise_if_errors()
  assert (result.return_code == 0)
  assert os.path.isfile("tmp_fmodel_fake_p1.mtz")
  from iotbx import crystal_symmetry_from_any
  symm = crystal_symmetry_from_any.extract_from("tmp_fmodel_fake_p1.mtz")
  assert (str(symm.space_group_info()) == "P 1")
  # FIXME this should fail but doesn't due to a bug in the program
  #args.append("reference_file=tmp_fmodel_fake_p1.mtz")
  #args.append("data_column_label=FMODEL,PHIFMODEL")
  #result = easy_run.fully_buffered(args).raise_if_errors()
  #print result.return_code
  print "OK"
Example #14
  def test1(self):
    from os.path import join, exists
    from libtbx import easy_run
    import os
    from uuid import uuid4

    dirname = 'tmp_%s' % uuid4().hex
    os.mkdir(dirname)
    os.chdir(dirname)

    assert exists(join(self.path, 'experiments.json'))

    # Call dials.integrate
    easy_run.fully_buffered([
      'dials.integrate',
      join(self.path, 'experiments.json'),
      'profile.fitting=False',
    ]).raise_if_errors()

    import cPickle as pickle
    table = pickle.load(open('integrated.pickle', 'rb'))
    print len(table)
    assert(len(table) == 751)

    # assert(len(table) == 764)
    assert('id' in table)
    for row in table:
      assert(row['id'] == 0)
    self.table = table
    print 'OK'
Example #15
def exercise_compilation():
  import os
  import sys
  import libtbx.load_env
  from libtbx import easy_run
  from libtbx.test_utils import open_tmp_file
  ucif_dist = libtbx.env.dist_path(module_name="ucif")
  antlr3_dist = libtbx.env.under_dist("ucif", "antlr3")
  os.environ["LIBTBX_UCIF"] = ucif_dist
  os.environ["LIBTBX_ANTLR3"] = antlr3_dist
  assert ucif_dist.find('"') < 0
  if sys.platform == "win32":
    cmd = '"%s/examples/build_cif_parser.bat"' %ucif_dist
    ext = ".exe"
  else:
    cmd = '. "%s/examples/build_cif_parser.sh"' %ucif_dist
    ext = ""
  result = easy_run.fully_buffered(cmd)
  if result.return_code:
    if len(result.stderr_lines) > 0:
      raise RuntimeError, result.show_stderr()
    raise RuntimeError, result.show_stdout()
  assert os.path.exists("cif_parser"+ext)
  f = open_tmp_file(suffix=".cif")
  f.write(cif_string)
  f.close()
  cmd = 'cif_parser "%s"' %f.name
  cmd = os.path.join(".", cmd)
  r = easy_run.fully_buffered(cmd).raise_if_errors()
  assert r.stdout_lines[0].startswith("Congratulations!")
Example #16
def exercise_2(pdb = "enk_gbr_e.pdb", hkl = "enk_gbr.mtz"):
  pdb = libtbx.env.find_in_repositories(
                              relative_path="phenix_regression/pdb/enk_gbr_e.pdb", test=os.path.isfile)
  cmd = [
    "phenix.refine",
    pdb,
    hkl,
    "main.number_of_macro_cycles=3",
    "strategy=group_adp",
    "fake_f_obs.structure_factors_accuracy.cos_sin_table=false",
    "main.target=ls",
    "group_b_iso.use_restraints=False",
    "group_b_iso.run_finite_differences_test=true",
    "output.write_maps=false",
    "output.write_map_coefficients=false",
    "output.write_geo_file=true",
    "output.write_def_file=false",
    "group_adp_refinement_mode=group_selection",
    "output.write_eff_file=false",
    "structure_factors_and_gradients_accuracy.cos_sin_table=false",
    "structure_factors_and_gradients_accuracy.algorithm=direct",
    "fake_f_obs.structure_factors_accuracy.algorithm=direct",
    "main.scattering_table=wk1995",
    "fake_f_obs.scattering_table=wk1995",
    "--overwrite",
    "refinement.input.xray_data.labels=FOBS",
    "main.bulk_solvent_and_scale=false",
    "group_adp_refinement_mode=group_selection",
    'adp.group="chain A"',
    'adp.group="chain B"',
    'adp.group="chain C"',
    'adp.group="chain D"',
    "output.prefix=ref2"]
  easy_run.fully_buffered(cmd).raise_if_errors()
Example #17
def run_scons():
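  # Report the Python/gcc/clang toolchain, then drive a SCons build from
  # within an Xcode environment (targets come from XCODE_* variables).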
  import libtbx.load_env
  from libtbx import easy_run
  from libtbx.command_line import scons

  print; print; print '-'*80

  m = re.search(r"^(\d+ \. \d+ \. \d+) .*? \[\s*GCC\s* (\d+ \. \d+ \. \d+)",
                sys.version,
                re.X|re.M|re.S)
  print 'Python %s (compiled with gcc %s)' % m.groups()
  print

  print "** %s **" % libtbx.env.build_path.basename()
  libtbx.env.build_options.report()

  print
  print '-'*80
  for compiler in ('gcc', 'clang',):
    print easy_run.fully_buffered('type %s' % compiler).stdout_lines[0]
  print '-'*80

  os.chdir(os.environ['XCODE_CCTBX_BUILD'])
  sys.argv[1:] = os.environ['XCODE_SCONS_OPTIONS'].split()
  if os.environ.get('XCODE_SCONS_LIB_TARGET'):
    libs = os.environ['XCODE_SCONS_LIB_TARGET'].split()
    sys.argv.extend([ "lib/%s.so" % lib for lib in libs ])
    print "warning: only library being built: %s" % ', '.join(libs)
  elif os.environ.get('XCODE_SCONS_PROGRAM_TARGET'):
    programs = os.environ['XCODE_SCONS_PROGRAM_TARGET'].split()
    sys.argv.extend(programs)
    print "warning: only program being built: %s" % ', '.join(programs)
  scons.run()
Example #18
  def test2(self):
    from os.path import join
    from libtbx import easy_run
    import os

    # Call dials.integrate
    easy_run.fully_buffered([
      'dials.integrate',
      join(self.path, 'experiments.json'),
      'profile.fitting=False',
      'integration.integrator=3d',
    ]).raise_if_errors()

    import cPickle as pickle
    table = pickle.load(open('integrated.pickle', 'rb'))
    mask = table.get_flags(table.flags.integrated,all=False)
    assert(len(table) == 1996)
    assert(mask.count(True) == 1666)

    # assert(len(table) == 764)
    assert('id' in table)
    for row in table:
      assert(row['id'] == 0)
    self.table = table
    print 'OK'
Example #19
  def test_xds_ascii(self):
    from libtbx import easy_run
    from os.path import exists
    from libtbx.test_utils import approx_equal

    # Call dials.export
    easy_run.fully_buffered([
      'dials.export',
      'summation=true',
      'format=xds_ascii',
      self.experiments,
      self.reflections
    ]).raise_if_errors()

    assert exists("DIALS.HKL")

    psi_values = {
      (-9, 7, -10):153.430361,
      (-5, 11, -26):175.559441,
      (-4, 23, 24):129.468070,
      (2, 10, 20):147.947274
      }

    for record in open('DIALS.HKL', 'r'):
      if record.startswith('!'):
        continue
      tokens = record.split()
      hkl = tuple(map(int, tokens[:3]))
      if hkl not in psi_values:
        continue
      psi = float(tokens[-1])
      assert approx_equal(psi, psi_values[hkl], eps=0.1)

    print 'OK'
Example #20
  def tst_multiple_sweeps(self):
    from glob import glob
    import os
    from libtbx import easy_run
    from dxtbx.serialize import load

    # Find the image files
    image_files = sorted(glob(os.path.join(self.path, "centroid*.cbf")))
    del image_files[4] # Delete filename to force two sweeps
    image_files = ' '.join(image_files)

    # importing two sweeps without allow_multiple_sweeps=True should fail
    cmd = 'dials.import %s output.datablock=datablock_multiple_sweeps.json' % (image_files)
    try:
      easy_run.fully_buffered(cmd).raise_if_errors()
      assert False, "Expected exception"
    except Exception:
      pass

    cmd = 'dials.import %s output.datablock=datablock_multiple_sweeps.json allow_multiple_sweeps=True' % (image_files)
    easy_run.fully_buffered(cmd).raise_if_errors()
    assert os.path.exists("datablock_multiple_sweeps.json")
    datablock = load.datablock("datablock_multiple_sweeps.json")[0]
    imgset = datablock.extract_imagesets()
    assert len(imgset) == 2

    print 'OK'
Example #21
  def test_sadabs(self):
    from libtbx import easy_run
    from os.path import exists
    from libtbx.test_utils import approx_equal

    # Call dials.export
    easy_run.fully_buffered([
      'dials.export',
      'summation=true',
      'format=sadabs',
      self.experiments,
      self.reflections
    ]).raise_if_errors()

    assert exists("integrated.sad")

    direction_cosines = {
      (-9, 7, -10):(0.51253, -0.72107, 0.84696, -0.68476, -0.14130, -0.10561),
      (-5, 11, -26):(0.51310, -0.62895, 0.84711, -0.59223, -0.13830, -0.50366),
      (-4, 23, 24):(0.51308, -0.60578, 0.84711, -0.31416, -0.13840, 0.73099),
      (2, 10, 20):(0.51239, -0.46605, 0.84693, -0.61521, -0.14204, 0.63586)
      }

    for record in open('integrated.sad', 'r'):
      record = record.replace('-', ' -')
      tokens = record.split()
      hkl = tuple(map(int, tokens[:3]))
      cosines = tuple(map(float, tokens[6:12]))
      if hkl not in direction_cosines:
        continue
      assert approx_equal(cosines, direction_cosines[hkl], eps=0.001)

    print 'OK'
Example #22
  def test_multi_lattice(self):
    from os.path import join
    from libtbx import easy_run
    import os

    dirname = 'multi_sweep'
    os.mkdir(dirname)
    os.chdir(dirname)

    # Call dials.integrate
    easy_run.fully_buffered([
      'dials.integrate',
      join(self.integration_test_data, 'multi_lattice', 'experiments.json'),
      join(self.integration_test_data, 'multi_lattice', 'indexed.pickle')
    ]).raise_if_errors()

    import cPickle as pickle
    table = pickle.load(open('integrated.pickle', 'rb'))
    assert len(table) == 5604

    # Check output contains from two lattices
    exp_id = list(set(table['id']))
    assert len(exp_id) == 2

    # Check both lattices have integrated reflections
    mask = table.get_flags(table.flags.integrated_prf)
    table = table.select(mask)
    exp_id = list(set(table['id']))
    assert len(exp_id) == 2

    print 'OK'
Example #23
  def tst_import_beam_centre(self):
    from glob import glob
    import os
    from libtbx import easy_run
    from dxtbx.serialize import load
    from libtbx.test_utils import approx_equal

    # Find the image files
    image_files = glob(os.path.join(self.path, "centroid*.cbf"))
    image_files = ' '.join(image_files)

    # provide mosflm beam centre to dials.import
    cmd = 'dials.import %s mosflm_beam_centre=100,200 output.datablock=mosflm_beam_centre.json' %image_files
    easy_run.fully_buffered(cmd)
    assert os.path.exists("mosflm_beam_centre.json")
    datablock = load.datablock("mosflm_beam_centre.json")[0]
    imgset = datablock.extract_imagesets()[0]
    beam_centre = imgset.get_detector()[0].get_beam_centre(imgset.get_beam().get_s0())
    assert approx_equal(beam_centre, (200,100))

    # provide an alternative datablock.json to get geometry from
    cmd = 'dials.import %s reference_geometry=mosflm_beam_centre.json output.datablock=mosflm_beam_centre2.json' %image_files
    easy_run.fully_buffered(cmd)
    assert os.path.exists("mosflm_beam_centre2.json")
    datablock = load.datablock("mosflm_beam_centre2.json")[0]
    imgset = datablock.extract_imagesets()[0]
    beam_centre = imgset.get_detector()[0].get_beam_centre(imgset.get_beam().get_s0())
    assert approx_equal(beam_centre, (200,100))

    print 'OK'
Example #24
  def run(self):
    if os.path.isfile(self.out_file):
      os.remove(self.out_file)
    if self.command is None:
      cmd = 'prime.run {} > {}'.format(self.prime_file, self.out_file)
    else:
      cmd = self.command

    easy_run.fully_buffered(cmd, join_stdout_stderr=True)
Example #25
  def run(self):

    from os.path import join, exists
    from libtbx import easy_run
    import os

    # Run a few commands from stdin
    stdin_lines = [
      "import template=%s" % join(self.path, "centroid_####.cbf"),
      "find_spots",
      "discover_better_experimental_model",
      "index",
      "refine_bravais_settings",
      "reindex solution=22",
      "refine",
      "goto 6",
    ]

    easy_run.fully_buffered(
      'idials',
      stdin_lines=stdin_lines).raise_if_errors()
    print 'OK'

    # Check that state works
    stdin_lines = [
      "refine",
      "integrate",
      "export",
      "goto 7",
      "integrate profile.fitting=False",
      "export",
    ]

    easy_run.fully_buffered('idials',
                            stdin_lines=stdin_lines).raise_if_errors()

    print 'OK'

    # Check all the stuff we expect, exists
    assert exists("dials.state")
    assert exists("dials-1")
    assert exists("10_integrated.mtz")
    assert exists("12_integrated.mtz")
    assert exists("dials-1/1_import")
    assert exists("dials-1/2_find_spots")
    assert exists("dials-1/3_discover_better_experimental_model")
    assert exists("dials-1/4_index")
    assert exists("dials-1/5_refine_bravais_settings")
    assert exists("dials-1/6_reindex")
    assert exists("dials-1/7_refine")
    assert exists("dials-1/8_refine")
    assert exists("dials-1/9_integrate")
    assert exists("dials-1/10_export")
    assert exists("dials-1/11_integrate")
    assert exists("dials-1/12_export")

    print 'OK'
Example #26
  def run(self):
    from libtbx import easy_run
    from dials.array_family import flex

    # Call dev.dials.show_extensions
    easy_run.fully_buffered([
      'dev.dials.show_extensions',
    ]).raise_if_errors()

    print 'OK'
Example #27
  def run(self):
    if os.path.isfile(self.out_file):
      os.remove(self.out_file)
    if self.command is None:
      cmd = 'prime.run {} > {}'.format(self.prime_file, self.out_file)
    else:
      cmd = self.command

    easy_run.fully_buffered(cmd, join_stdout_stderr=True)
    evt = AllDone(tp_EVT_ALLDONE, -1)
    wx.PostEvent(self.parent, evt)
Example #28
def run(args):
    import os

    no_unzipsfx = len(args) > 0 and args[0] == "--no-unzipsfx"
    if no_unzipsfx:
        args = args[1:]
    single_dir = len(args) > 0 and args[0] == "--single_directory"
    if single_dir:
        args = args[1:]
    if len(args) < 2:
        from libtbx.utils import Usage
        import libtbx.load_env

        raise Usage(
            "%s [--no-unzipsfx] [--single_directory] bundle_prefix platform_string [addl_files...]"
            % libtbx.env.dispatcher_name
        )
    if os.name == "nt":
        exe_suffix = ".exe"
    else:
        exe_suffix = ""
    import libtbx.path

    path_zip = libtbx.path.full_command_path(command="zip" + exe_suffix, search_first=["."])
    if path_zip is None:
        raise RuntimeError("Fatal: zip executable not found.")
    bundle_prefix = args[0]
    if (single_dir) and (not os.path.isdir(bundle_prefix)):
        from libtbx.utils import Sorry

        raise Sorry("%s does not exist or is not a directory." % bundle_prefix)
    platform_string = args[1]
    addl_files = args[2:]
    zip_file_name = "%(bundle_prefix)s_%(platform_string)s.zip" % vars()
    open("autorun", "w").write(create_autorun(bundle_prefix, single_dir))
    if single_dir:
        cmd = ('"%(path_zip)s" -q -r -z %(zip_file_name)s' + " %(bundle_prefix)s") % vars()
    else:
        cmd = (
            '"%(path_zip)s" -q -r -z %(zip_file_name)s'
            + " %(bundle_prefix)s_sources"
            + " %(bundle_prefix)s_build"
            + " %(bundle_prefix)s_install_script.bat"
        ) % vars()
    for addl in addl_files:
        cmd += " " + addl
    cmd += " < autorun"
    print cmd
    from libtbx import easy_run

    easy_run.fully_buffered(command=cmd).raise_if_errors().show_stdout()
    if not no_unzipsfx:
        from libtbx.command_line import create_unzipsfx

        create_unzipsfx.create(zip_file_name=zip_file_name)
Example #29
  def run(self):
    from os.path import join, exists
    from libtbx import easy_run
    import os

    # Call dials.plot_scan_varying_crystal
    easy_run.fully_buffered([
      'dials.plot_scan_varying_crystal',
      self.path
    ]).raise_if_errors()

    print 'OK'
Example #30
  def tst_from_xds_files(self):
    from os.path import abspath, exists, join
    from libtbx import easy_run

    # Import experiment metadata from the XDS files
    easy_run.fully_buffered([
      'dials.import_xds',
      'input.method=experiment',
      'output.filename=import_experiments.json',
      join(self.path)]).raise_if_errors()

    assert(exists("import_experiments.json"))
    print 'OK'
Example #31
    def run(self):
        '''Execute the script.'''
        runs = [
            "r%04d" % r if isinstance(r, int) else r
            for r in self.params.striping.run
        ]
        if self.params.striping.run:
            print("processing runs " + ", ".join(runs))
        if self.params.striping.rungroup:
            print("processing rungroups " + ", ".join(
                ["rg%03d" % rg for rg in self.params.striping.rungroup]))
        batch_chunks = allocate_chunks(
            self.params.striping.results_dir,
            self.params.striping.trial,
            rgs_selected=[
                "rg%03d" % rg for rg in self.params.striping.rungroup
            ],
            respect_rungroup_barriers=self.params.striping.respect_rungroup_barriers,
            runs_selected=runs,
            stripe=self.params.striping.stripe,
            max_size=self.params.striping.chunk_size,
            integrated=self.params.combine_experiments.keep_integrated)
        # default the output folder before it is used to build paths
        if self.params.striping.output_folder is None:
            self.params.striping.output_folder = os.getcwd()
        self.dirname = os.path.join(
            self.params.striping.output_folder,
            "combine_experiments_t%03d" % self.params.striping.trial)
        self.intermediates = os.path.join(self.dirname, "intermediates")
        self.extracted = os.path.join(self.dirname, "final_extracted")
        for d in self.dirname, self.intermediates, self.extracted:
            if not os.path.isdir(d):
                os.mkdir(d)
        tag = "stripe" if self.params.striping.stripe else "chunk"
        all_commands = []
        for batch, ch_list in six.iteritems(batch_chunks):
            for idx in range(len(ch_list)):
                chunk = ch_list[idx]

                # reset for this chunk/stripe
                self.filename = "t%03d_%s_%s%03d" % (
                    self.params.striping.trial, batch, tag, idx)
                self.command_sequence = []

                # set up the file containing input expts and refls (logging)
                chunk_path = os.path.join(self.params.striping.output_folder,
                                          self.intermediates, self.filename)
                if os.path.isfile(chunk_path):
                    os.remove(chunk_path)
                with open(chunk_path, "wb") as outfile:
                    for i in (0, 1):  # expts then refls
                        outfile.write("\n".join(chunk[i]) + "\n")

                # set up the params for dials.combine_experiments
                custom_parts = ["  input {"]
                for expt_path in chunk[0]:
                    custom_parts.append("    experiments = %s" % expt_path)
                for refl_path in chunk[1]:
                    custom_parts.append("    reflections = %s" % refl_path)
                custom_parts.append("  }")
                self.set_up_section("combine_experiments",
                                    "dials.combine_experiments",
                                    clustering=False,
                                    custom_parts=custom_parts)

                # refinement of the grouped experiments
                self.set_up_section("refinement",
                                    "dials.refine",
                                    clustering=self.clustering)

                # refinement of the grouped experiments
                self.set_up_section("recompute_mosaicity",
                                    "cctbx.xfel.recompute_mosaicity",
                                    clustering=self.clustering)

                # reintegration
                if self.params.reintegration.enable:
                    custom_parts = [
                        "  integration.mp.nproc = %d" % self.params.mp.nproc
                    ]
                    self.set_up_section("reintegration",
                                        "dials.integrate",
                                        custom_parts=custom_parts,
                                        clustering=self.clustering)

                # extract results to integration pickles for merging
                if self.params.postprocessing.enable:
                    lambda_diff_str = lambda diff_str: (diff_str % \
                      (os.path.join("..", "final_extracted"))).replace("ITER", "%04d")
                    self.set_up_section("postprocessing",
                                        "cctbx.xfel.frame_extractor",
                                        lambda_diff_str=lambda_diff_str,
                                        clustering=self.clustering)

                # submit queued job from appropriate directory
                os.chdir(self.intermediates)
                command = " && ".join(self.command_sequence)
                if self.params.combine_experiments.clustering.dendrogram:
                    easy_run.fully_buffered(
                        command).raise_if_errors().show_stdout()
                else:
                    submit_folder = os.path.join(
                        self.params.striping.output_folder, self.intermediates)
                    submit_path = os.path.join(submit_folder,
                                               "combine_%s.sh" % self.filename)
                    submit_command = get_submit_command_chooser(
                        command,
                        submit_path,
                        self.intermediates,
                        self.params.mp,
                        log_name=os.path.splitext(
                            os.path.basename(submit_path))[0] + ".out",
                        err_name=os.path.splitext(
                            os.path.basename(submit_path))[0] + ".err",
                        root_dir=submit_folder)
                    all_commands.append(submit_command)
                    if not self.params.striping.dry_run:
                        print("executing command: %s" % submit_command)
                        try:
                            easy_run.fully_buffered(
                                submit_command).raise_if_errors().show_stdout()
                        except Exception as e:
                            if "Warning: job being submitted without an AFS token." not in str(e):
                                raise e
        return all_commands
Example #32
def run_command(
      command,
      verbose=0,
      buffered=True,
      log_file_name=None,
      stdout_file_name=None,
      result_file_names=[],
      show_diff_log_stdout=False,
      sorry_expected=False,
      join_stdout_stderr=False):
  """\
This function starts another process to run command, with some
pre-call and post-call processing.
Before running command, the expected output files are removed:

  log_file_name
  stdout_file_name
  result_file_names

After command is finished, log_file_name and stdout_file_name are scanned
for Traceback and Sorry. An exception is raised if there are any
matches. sorry_expected=True suppresses the scanning for Sorry.

With buffered=True easy_run.fully_buffered() is used. If there
is any output to stderr of the child process, an exception is
raised. The run_command() return value is the result of the
easy_run.fully_buffered() call.

With buffered=False easy_run.call() is used. I.e. stdout and stderr
of the command are connected to stdout and stderr of the parent
process. stderr is not checked. The run_command() return value is None.

It is generally best to use buffered=True, and even better not to
use this function at all if command is another Python script. It
is better to organize the command script such that it can be called
directly from within the same Python process running the unit tests.
"""
  assert verbose >= 0
  if (verbose > 0):
    print command
    print
    show_command_if_error = None
  else:
    show_command_if_error = command
  all_file_names = [log_file_name, stdout_file_name] + result_file_names
  for file_name in all_file_names:
    if (file_name is None): continue
    if (os.path.isfile(file_name)): os.remove(file_name)
    if (os.path.exists(file_name)):
      raise RunCommandError(
        "Unable to remove file: %s" % show_string(file_name))
  if (buffered):
    sys.stdout.flush()
    sys.stderr.flush()
    cmd_result = easy_run.fully_buffered(
      command=command,
      join_stdout_stderr=join_stdout_stderr)
    if (len(cmd_result.stderr_lines) != 0):
      if (verbose == 0):
        print command
        print
      print "\n".join(cmd_result.stdout_lines)
      cmd_result.raise_if_errors()
    _check_command_output(
      lines=cmd_result.stdout_lines,
      show_command_if_error=show_command_if_error,
      sorry_expected=sorry_expected)
  else:
    easy_run.call(command=command)
    cmd_result = None
  for file_name in [log_file_name, stdout_file_name]:
    if (file_name is None or not os.path.isfile(file_name)): continue
    _check_command_output(
      file_name=file_name,
      show_command_if_error=show_command_if_error,
      sorry_expected=sorry_expected)
  for file_name in all_file_names:
    if (file_name is None): continue
    if (not os.path.isfile(file_name)):
      raise RunCommandError(
        "Missing output file: %s" % show_string(file_name))
  if (verbose > 1 and cmd_result is not None):
    print "\n".join(cmd_result.stdout_lines)
    print
  if (    show_diff_log_stdout
      and log_file_name is not None
      and stdout_file_name is not None):
    if (verbose > 0):
      print "diff %s %s" % (show_string(log_file_name),
                            show_string(stdout_file_name))
      print
    if (show_diff(open(log_file_name).read(), open(stdout_file_name).read())):
      introspection.show_stack(
        frames_back=1, reverse=True, prefix="INFO_LOG_STDOUT_DIFFERENCE: ")
      print "ERROR_LOG_STDOUT_DIFFERENCE"
  sys.stdout.flush()
  return cmd_result
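A minimal usage sketch for run_command, following the docstring above; the
import path is libtbx.test_utils as in cctbx, and the command and file names
are illustrative only:

def demo_run_command():
  from libtbx.test_utils import run_command
  cmd_result = run_command(
    command="my_tool input.dat > my_tool.log",  # hypothetical command
    verbose=1,
    buffered=True,                     # any stderr output raises an exception
    log_file_name="my_tool.log",       # scanned for Traceback and Sorry
    result_file_names=["output.dat"],  # must exist after the run
    sorry_expected=False)
  # with buffered=True, the easy_run.fully_buffered() result is returned
  return cmd_result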
Example #33
    def __call__(O, file_info):
        from libtbx import easy_run
        from libtbx.str_utils import show_string
        from libtbx.test_utils import show_diff
        from six.moves import StringIO
        import os.path as op
        import sys
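        # Translate the Fortran source to C++ with fable.cout, build it (and
        # optionally the original with ifort), run the executables, and diff
        # their output against the expected lines.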
        opts = O.opts
        file_name, io_infos = file_info
        if (opts.verbose):
            print(file_name)
        file_path = op.join(O.test_valid, file_name)
        top_procedures = top_procedures_by_file_name.get(file_name)
        common_equivalence_simple_list = [
            set(common_equivalence_simple_by_file_name.get(file_name, []))
        ]
        if (len(common_equivalence_simple_list[0]) != 0):
            common_equivalence_simple_list.append([])
        for i_ces,common_equivalence_simple in \
              enumerate(common_equivalence_simple_list):
            common_report_stringio = StringIO()
            try:
                lines = fable.cout.process(
                    file_names=[file_path],
                    top_procedures=top_procedures,
                    dynamic_parameters=dynamic_parameters_by_file_name.get(
                        file_name),
                    common_equivalence_simple=common_equivalence_simple,
                    common_report_stringio=common_report_stringio)
            except Exception:
                if (not opts.keep_going): raise
                print("\nEXCEPTION: fable.cout.process([%s])\n" % file_name)
                return 1
            have_simple_equivalence = (
                "\n".join(lines).find(" // SIMPLE EQUIVALENCE") >= 0)
            if (len(common_equivalence_simple) != 0):
                assert have_simple_equivalence
            else:
                assert not have_simple_equivalence
            assert file_name.endswith(".f")
            base_name = file_name[:-2]
            if (len(common_equivalence_simple_list) != 1):
                base_name += "_alt%d" % i_ces
            fem_cpp = base_name + "_fem.cpp"
            fem_exe_name = fem_cpp[:-4] + O.comp_env.exe_suffix
            print("\n".join(lines), file=open(fem_cpp, "w"))
            if (opts.ifort):
                ifort_exe_name = base_name + "_ifort"
                ifort_cmd = "ifort -diag-disable 7000 -o %s %s" % (
                    ifort_exe_name, show_string(file_path))
            else:
                ifort_exe_name = None
                ifort_cmd = None
            if (opts.dry_run):
                return 0
            #
            n_failures = [0]

            def handle_exception(e):
                n_failures[0] += 1
                if (not opts.keep_going): raise
                print()
                print(str(e))
                print()
                sys.stdout.flush()

            #
            class BuildError(RuntimeError):
                pass

            try:
                O.comp_env.build(
                    link=True,
                    file_name_cpp=fem_cpp,
                    exe_name=fem_exe_name,
                    disable_warnings=(file_name
                                      in file_names_disable_warnings),
                    show_command=opts.verbose,
                    Error=BuildError)
            except BuildError as e:
                handle_exception(e)
                fem_exe_name = None
            #
            if (ifort_cmd is not None):
                if (opts.verbose):
                    print(ifort_cmd)
                buffers = easy_run.fully_buffered(command=ifort_cmd)
                try:
                    buffers.raise_if_errors_or_output(Error=BuildError)
                except BuildError as e:
                    handle_exception(e)
                    ifort_exe_name = None
            #
            for info in io_infos:
                if (info.skip_run):
                    if (opts.verbose):
                        print("Skipping run:", file_name)
                    continue
                if (len(info.inp_lines) != 0 and opts.verbose):
                    print("  number of input lines:", len(info.inp_lines))
                sys.stdout.flush()
                for exe_name in [fem_exe_name, ifort_exe_name]:
                    if (exe_name is None): continue
                    cmd = cmd0 = op.join(".", exe_name)
                    if (opts.valgrind):
                        cmd = "valgrind " + cmd
                    if (opts.verbose):
                        print(cmd)
                        sys.stdout.flush()
                    join_stdout_stderr = (opts.valgrind or
                                          (file_name
                                           in file_names_join_stdout_stderr))
                    buffers = easy_run.fully_buffered(
                        command=cmd,
                        stdin_lines=info.inp_lines,
                        join_stdout_stderr=join_stdout_stderr)
                    if (not join_stdout_stderr):

                        class ExeError(RuntimeError):
                            pass

                        try:
                            buffers.raise_if_errors(Error=ExeError)
                        except ExeError as e:
                            handle_exception(e)
                            buffers = None
                    if (buffers is not None):
                        text = "\n".join(buffers.stdout_lines)
                        if (opts.valgrind):
                            print(text)
                        else:

                            def check(text):
                                if (file_name == "intrinsics_extra.f"):
                                    check_intrinsics_extra(text)
                                    return
                                if (file_name == "sf.f"):
                                    text = text.replace(
                                        " -0.620088", " -0.620087")
                                elif (file_name == "unformatted_experiments.f"):
                                    if (sys.byteorder == "big"):
                                        text = text \
                                          .replace(
                                            "        1234        5678",
                                            "        5678        1234") \
                                          .replace(
                                            "        18558553691448",
                                            "        23330262356193")
                                have_diffs = show_diff(
                                    text, "\n".join(info.out_lines))

                                def assert_not_have_diffs():
                                    if (opts.keep_going):
                                        print(
                                            "WARNING: --keep-going after show_diff:",
                                            exe_name)
                                    else:
                                        assert not have_diffs

                                if (have_diffs):
                                    if (exe_name is fem_exe_name):
                                        assert_not_have_diffs()
                                    elif (exe_name is ifort_exe_name):
                                        if (not info.ifort_diff_behavior
                                                and not info.ifort_diff_floating_point_format):
                                            assert_not_have_diffs()
                                    else:
                                        raise AssertionError

                            check(text)

                    def run_with_args(args):
                        cmda = cmd0 + " " + args
                        if (opts.verbose):
                            print(cmda)
                            sys.stdout.flush()
                        result = easy_run.fully_buffered(
                            command=cmda, join_stdout_stderr=True)
                        if (opts.valgrind):
                            cmda = "valgrind " + cmda
                            if (opts.verbose):
                                print(cmda)
                                sys.stdout.flush()
                            buffers = easy_run.fully_buffered(
                                command=cmda, join_stdout_stderr=True)
                            print("\n".join(buffers.stdout_lines))
                        return result

                    if (file_name == "read_lines.f"):
                        exercise_end_of_line(exe_name=exe_name,
                                             verbose=opts.verbose)
                    elif (file_name == "dynamic_parameters_1.f"):
                        buffers = run_with_args("--fem-dynamic-parameters=5")
                        assert not show_diff(
                            buffers.stdout_lines, """\
          14          15          16          17          18          19
          20          21          22          23
""")
                        buffers = run_with_args("--fem-dynamic-parameters=5,6")
                        assert buffers.stdout_lines[0].endswith(
                            "Too many --fem-dynamic-parameters fields"
                            " (given: 2, max. expected: 1)")
                        buffers = run_with_args("--fem-dynamic-parameters=x")
                        assert buffers.stdout_lines[0].endswith(
                            'Invalid --fem-dynamic-parameters field (field 1): "x"'
                        )
                    elif (file_name == "intrinsics_iargc_getarg.f"):
                        buffers = run_with_args("D rP uWq")
                        assert not show_diff(
                            buffers.stdout_lines, "\n".join([
                                "A", "D   ", "rP  ", "uWq ", "B", "uWq ",
                                "rP  ", "D   ", "C", "rP  ", "uWq ", "D   "
                            ]) + "\n")
        #
        return n_failures[0]
Example #34
  def run(self):
    from os.path import join, exists
    from libtbx import easy_run

    assert(exists(join(self.path, "datablock.json")))

    input_filename = join(self.path, "datablock.json")

    # Call dials.generate_mask
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
    ]).raise_if_errors()

    assert(exists("mask.pickle"))

    print 'OK'

    # Call dials.generate_mask
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
      'output.mask=mask2.pickle',
      'output.datablock=masked_datablock.json',
      'untrusted.rectangle=100,200,100,200'
    ]).raise_if_errors().show_stdout()
    assert(exists("mask2.pickle"))
    assert(exists("masked_datablock.json"))
    from dxtbx.serialize import load
    datablocks = load.datablock("masked_datablock.json")
    imageset = datablocks[0].extract_imagesets()[0]
    import os
    assert imageset.external_lookup.mask.filename == os.path.join(
      os.path.abspath(os.getcwd()), 'mask2.pickle')

    print 'OK'

    # Call dials.generate_mask
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
      'output.mask=mask3.pickle',
      'untrusted.circle=100,100,10'
    ]).raise_if_errors()
    assert(exists("mask3.pickle"))

    print 'OK'

    # Call dials.generate_mask
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
      'output.mask=mask4.pickle',
      'resolution_range=2,3',
    ]).raise_if_errors()
    assert(exists("mask4.pickle"))

    print 'OK'

    # Call dials.generate_mask
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
      'output.mask=mask5.pickle',
      'd_min=3',
      'd_max=2',
    ]).raise_if_errors()
    assert(exists("mask5.pickle"))

    print 'OK'

    # Call dials.generate_mask
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
      'output.mask=mask6.pickle',
      '\'ice_rings{filter=True;d_min=2}\'',
    ]).raise_if_errors()
    assert(exists("mask6.pickle"))

    print 'OK'

    # Call dials.generate_mask
    easy_run.fully_buffered([
      'dials.generate_mask',
      input_filename,
      'output.mask=mask3.pickle',
      'untrusted.polygon=100,100,100,200,200,200,200,100'
    ]).raise_if_errors()
    assert(exists("mask3.pickle"))

    print 'OK'
Example #35
  def run(self):
    command = '{} {}'.format(self.viewer, self.file_string)
    easy_run.fully_buffered(command)
Example #36
    def run_probe_clashscore(self, pdb_string):
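        # Feed the model to the probe program on stdin and derive the clash
        # counts and the clashscore (clashes per 1000 atoms) from its output.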
        self.n_clashes = 0
        self.n_clashes_b_cutoff = 0
        self.clashscore_b_cutoff = None
        self.bad_clashes = []
        self.clashscore = None
        self.n_atoms = 0
        self.natoms_b_cutoff = 0

        probe_out = easy_run.fully_buffered(self.probe_txt,
                                            stdin_lines=pdb_string)
        if (probe_out.return_code != 0):
            raise RuntimeError("Probe crashed - dumping stderr:\n%s" %
                               "\n".join(probe_out.stderr_lines))
        probe_unformatted = probe_out.stdout_lines

        # Debugging facility, do not remove!
        # import random
        # tempdir = "tmp_for_probe_debug_%d" % random.randint(1000,9999)
        # while os.path.isdir(tempdir):
        #   tempdir = "tmp_for_probe_debug_%d" % random.randint(1000,9999)
        # os.mkdir(tempdir)
        # print "Dumping info to %s" % tempdir
        # with open(tempdir + os.sep + 'model.pdb', 'w') as f:
        #   f.write(pdb_string)
        # with open(tempdir + os.sep + 'probe_out.txt', 'w') as f:
        #   f.write('\n'.join(probe_unformatted))

        if not self.fast:
            temp = self.process_raw_probe_output(probe_unformatted)
            self.n_clashes = len(temp)
            # XXX Warning: one more probe call here
            printable_probe_out = easy_run.fully_buffered(
                self.full_probe_txt, stdin_lines=pdb_string)
            self.probe_unformatted = "\n".join(
                printable_probe_out.stdout_lines)
        else:
            self.n_clashes = self.get_condensed_clashes(probe_unformatted)

        # getting number of atoms from probe
        probe_info = easy_run.fully_buffered(
            self.probe_atom_txt,
            stdin_lines=pdb_string)  #.raise_if_errors().stdout_lines
        err = probe_info.format_errors_if_any()
        if err is not None and err.find("No atom data in input.") > -1:
            return
        #if (len(probe_info) == 0):
        #  raise RuntimeError("Empty PROBE output.")
        n_atoms = 0
        for line in probe_info.stdout_lines:
            try:
                dump, n_atoms = line.split(":")
            except KeyboardInterrupt:
                raise
            except ValueError:
                pass  # something else (different from expected) got into output
        self.n_atoms = int(n_atoms)
        if self.n_atoms == 0:
            self.clashscore = 0.0
        else:
            self.clashscore = (self.n_clashes * 1000) / self.n_atoms

        if not self.fast:
            # The rest is not necessary, we already got clashscore
            if self.b_factor_cutoff is not None:
                clashes_b_cutoff = 0
                for clash_obj in temp:
                    if clash_obj.max_b_factor < self.b_factor_cutoff:
                        clashes_b_cutoff += 1
                self.n_clashes_b_cutoff = clashes_b_cutoff
            used = []

            for clash_obj in sorted(temp):
                test_key = clash_obj.id_str_no_atom_name()
                test_key = clash_obj.id_str()
                if test_key not in used:
                    used.append(test_key)
                    self.bad_clashes.append(clash_obj)

            if self.probe_atom_b_factor is not None:
                probe_info_b_factor = easy_run.fully_buffered(
                    self.probe_atom_b_factor,
                    stdin_lines=pdb_string).raise_if_errors().stdout_lines
                for line in probe_info_b_factor:
                    dump_b, natoms_b_cutoff = line.split(":")
                self.natoms_b_cutoff = int(natoms_b_cutoff)
            self.clashscore_b_cutoff = None
            if self.natoms_b_cutoff == 0:
                self.clashscore_b_cutoff = 0.0
            else:
                self.clashscore_b_cutoff = \
                  (self.n_clashes_b_cutoff*1000) / self.natoms_b_cutoff
Example #37
    try:
        dials_regression = libtbx.env.dist_path('dials_regression')
    except KeyError, e:
        print 'FAIL: dials_regression not configured'
        exit(0)

    path = os.path.join(dials_regression,
                        "experiment_test_data/experiment_1.json")
    newpath = os.path.join(os.getcwd(), 'experiments.json')
    shutil.copyfile(path, newpath)
    for line in fileinput.FileInput(newpath, inplace=True):
        if '$DIALS_REGRESSION' in line:
            line = line.replace('$DIALS_REGRESSION', dials_regression)
        print line,  # trailing comma: line already ends with a newline

    cmd = "dials.export format=mosflm %s" % newpath
    result = easy_run.fully_buffered(cmd).raise_if_errors()
    assert os.path.exists("mosflm/index.mat")
    with open("mosflm/index.mat", "rb") as f:
        lines = f.read()
        assert not show_diff(
            lines, """\
 -0.01210200 -0.01954526  0.00309519
 -0.00416605 -0.00080573 -0.02427340
  0.01931593 -0.01241956 -0.00329641
       0.000       0.000       0.000
 -0.52228050 -0.84350975  0.12535704
 -0.17980379 -0.03477015 -0.98308781
  0.83360283 -0.53598726 -0.13350648
     42.2717     42.2720     39.6704     90.0001     89.9993     89.9998
       0.000       0.000       0.000
""")
Example #38
def refine_expanding(params, merged_scope, combine_phil):
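    # Refine the CSPAD detector geometry in stages: start from a small set of
    # panels and expand outwards, refining at increasing hierarchy levels.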
    assert params.start_at_hierarchy_level == 0
    if params.rmsd_filter.enable:
        input_name = "filtered"
        command = "cctbx.xfel.filter_experiments_by_rmsd %s %s output.filtered_experiments=%s output.filtered_reflections=%s"
        command = command % ("%s_combined_experiments.json" % params.tag,
                             "%s_combined_reflections.pickle" % params.tag,
                             "%s_filtered_experiments.json" % params.tag,
                             "%s_filtered_reflections.pickle" % params.tag)
        command += " iqr_multiplier=%f" % params.rmsd_filter.iqr_multiplier
        print command
        result = easy_run.fully_buffered(command=command).raise_if_errors()
        result.show_stdout()
    else:
        input_name = "combined"
    # --------------------------
    if params.panel_filter is not None:
        from libtbx import easy_pickle
        print "Filtering out all reflections except those on panels %s" % (
            ", ".join(["%d" % p for p in params.panel_filter]))
        combined_path = "%s_combined_reflections.pickle" % params.tag
        data = easy_pickle.load(combined_path)
        sel = None
        for panel_id in params.panel_filter:
            if sel is None:
                sel = data['panel'] == panel_id
            else:
                sel |= data['panel'] == panel_id
        print "Retaining", len(
            data.select(sel)), "out of", len(data), "reflections"
        easy_pickle.dump(combined_path, data.select(sel))
    # ----------------------------------
    # this is the order to refine the CSPAD in
    steps = {}
    steps[0] = [2, 3]
    steps[1] = steps[0] + [0, 1]
    steps[2] = steps[1] + [14, 15]
    steps[3] = steps[2] + [6, 7]
    steps[4] = steps[3] + [4, 5]
    steps[5] = steps[4] + [12, 13]
    steps[6] = steps[5] + [8, 9]
    steps[7] = steps[6] + [10, 11]

    for s, panels in steps.iteritems():
        rest = []
        for p in panels:
            rest.append(p + 16)
            rest.append(p + 32)
            rest.append(p + 48)
        panels.extend(rest)

    levels = {0: (0, 1)}  # levels 0 and 1
    for i in range(7):
        levels[i + 1] = (2, )  # level 2

    previous_step_and_level = None
    for j in range(8):
        from libtbx import easy_pickle
        print "Filtering out all reflections except those on panels %s" % (
            ", ".join(["%d" % p for p in steps[j]]))
        combined_path = "%s_%s_reflections.pickle" % (params.tag, input_name)
        output_path = "%s_reflections_step%d.pickle" % (params.tag, j)
        data = easy_pickle.load(combined_path)
        sel = None
        for panel_id in steps[j]:
            if sel is None:
                sel = data['panel'] == panel_id
            else:
                sel |= data['panel'] == panel_id
        print "Retaining", len(
            data.select(sel)), "out of", len(data), "reflections"
        easy_pickle.dump(output_path, data.select(sel))

        for i in levels[j]:
            print "Step", j, "refining at hierarchy level", i
            refine_phil_file = "%s_refine_step%d_level%d.phil" % (params.tag,
                                                                  j, i)
            if i == 0:
                if params.refine_distance:
                    diff_phil = "refinement.parameterisation.detector.fix_list=Tau1"  # fix detector rotz
                else:
                    diff_phil = "refinement.parameterisation.detector.fix_list=Dist,Tau1"  # fix detector rotz, distance
                if params.flat_refinement:
                    diff_phil += ",Tau2,Tau3"  # Also fix x and y rotations
                diff_phil += "\n"
                if params.refine_energy:
                    diff_phil += "refinement.parameterisation.beam.fix=in_spindle_plane+out_spindle_plane\n"  # allow energy to refine
            else:
                # Note, always need to fix something, so pick a panel group and fix its Tau1 (rotation around Z) always
                if params.flat_refinement and params.flat_refinement_with_distance:
                    diff_phil = "refinement.parameterisation.detector.fix_list=Group1Tau1,Tau2,Tau3\n"  # refine distance, rotz and xy translation
                    diff_phil += "refinement.parameterisation.detector.constraints.parameter=Dist\n"  # constrain distance to be refined identically for all panels at this hierarchy level
                elif params.flat_refinement:
                    diff_phil = "refinement.parameterisation.detector.fix_list=Dist,Group1Tau1,Tau2,Tau3\n"  # refine only rotz and xy translation
                else:
                    diff_phil = "refinement.parameterisation.detector.fix_list=Group1Tau1\n"  # refine almost everything

            if previous_step_and_level is None:
                command = "dials.refine %s %s_%s_experiments.json %s_reflections_step%d.pickle"%( \
                  refine_phil_file, params.tag, input_name, params.tag, j)
            else:
                p_step, p_level = previous_step_and_level
                if p_step == j:
                    command = "dials.refine %s %s_refined_experiments_step%d_level%d.json %s_refined_reflections_step%d_level%d.pickle"%( \
                      refine_phil_file, params.tag, p_step, p_level, params.tag, p_step, p_level)
                else:
                    command = "dials.refine %s %s_refined_experiments_step%d_level%d.json %s_reflections_step%d.pickle"%( \
                      refine_phil_file, params.tag, p_step, p_level, params.tag, j)

            diff_phil += "refinement.parameterisation.detector.hierarchy_level=%d\n" % i

            output_experiments = "%s_refined_experiments_step%d_level%d.json" % (
                params.tag, j, i)
            command += " output.experiments=%s output.reflections=%s_refined_reflections_step%d_level%d.pickle"%( \
              output_experiments, params.tag, j, i)

            scope = merged_scope.fetch(parse(diff_phil))
            f = open(refine_phil_file, 'w')
            f.write(refine_scope.fetch_diff(scope).as_str())
            f.close()

            print command
            result = easy_run.fully_buffered(command=command).raise_if_errors()
            result.show_stdout()

            # In expanding mode, if using flat refinement with distance, after having refined this step as a block, unrefined
            # panels will have been left behind.  Read back the new metrology, compute the shift applied to the panels refined
            # in this step, and apply that shift to the unrefined panels in this step.
            if params.flat_refinement and params.flat_refinement_with_distance and i > 0:
                from dxtbx.model.experiment_list import ExperimentListFactory, ExperimentListDumper
                from xfel.command_line.cspad_detector_congruence import iterate_detector_at_level, iterate_panels
                from scitbx.array_family import flex
                from scitbx.matrix import col
                from libtbx.test_utils import approx_equal
                experiments = ExperimentListFactory.from_json_file(
                    output_experiments, check_format=False)
                assert len(experiments.detectors()) == 1
                detector = experiments.detectors()[0]
                # Displacements: deltas along the vector normal to the detector
                displacements = flex.double()
                # Iterate through the panel groups at this level
                for panel_group in iterate_detector_at_level(
                        detector.hierarchy(), 0, i):
                    # Were there panels refined in this step in this panel group?
                    if params.panel_filter:
                        test = [
                            list(detector).index(panel) in steps[j]
                            for panel in iterate_panels(panel_group) if list(
                                detector).index(panel) in params.panel_filter
                        ]
                    else:
                        test = [
                            list(detector).index(panel) in steps[j]
                            for panel in iterate_panels(panel_group)
                        ]
                    if not any(test): continue
                    # Compute the translation along the normal of this panel group.  This is defined as distance in dials.refine
                    displacements.append(
                        col(panel_group.get_local_fast_axis()).cross(
                            col(panel_group.get_local_slow_axis())).dot(
                                col(panel_group.get_local_origin())))

                # Even though the panels are constrained to move the same amount, there is a bit of variation.
                stats = flex.mean_and_variance(displacements)
                displacement = stats.mean()
                print "Average displacement along normals: %f +/- %f" % (
                    stats.mean(), stats.unweighted_sample_standard_deviation())

                # Verify the variation isn't significant
                for k in range(1, len(displacements)):
                    assert approx_equal(displacements[0], displacements[k])
                # If all of the panel groups in this level moved, no need to do anything.
                if len(displacements) != len(
                        list(
                            iterate_detector_at_level(detector.hierarchy(), 0,
                                                      i))):
                    for panel_group in iterate_detector_at_level(
                            detector.hierarchy(), 0, i):
                        if params.panel_filter:
                            test = [
                                list(detector).index(panel) in steps[j]
                                and list(detector).index(panel)
                                in params.panel_filter
                                for panel in iterate_panels(panel_group)
                            ]
                        else:
                            test = [
                                list(detector).index(panel) in steps[j]
                                for panel in iterate_panels(panel_group)
                            ]
                        # If any of the panels in this panel group moved, no need to do anything
                        if any(test): continue

                        # None of the panels in this panel group moved in this step, so need to apply displacement from other panel
                        # groups at this level
                        fast = col(panel_group.get_local_fast_axis())
                        slow = col(panel_group.get_local_slow_axis())
                        ori = col(panel_group.get_local_origin())
                        normal = fast.cross(slow)
                        panel_group.set_local_frame(
                            fast, slow, (ori.dot(fast) * fast) +
                            (ori.dot(slow) * slow) + (normal * displacement))

                # Check the new displacements. Should be the same across all panels.
                displacements = []
                for panel_group in iterate_detector_at_level(
                        detector.hierarchy(), 0, i):
                    displacements.append(
                        col(panel_group.get_local_fast_axis()).cross(
                            col(panel_group.get_local_slow_axis())).dot(
                                col(panel_group.get_local_origin())))

                for k in range(1, len(displacements)):
                    assert approx_equal(displacements[0], displacements[k])

                dump = ExperimentListDumper(experiments)
                dump.as_json(output_experiments)

            previous_step_and_level = j, i

    output_geometry(params)
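
refine_expanding repeatedly measures how far each panel group sits along the detector normal: the local normal is fast x slow, and the displacement is its dot product with the local origin. A pure-python stand-in for the scitbx.matrix.col arithmetic used above (a sketch, not the scitbx API):

def cross(a, b):
    return (a[1] * b[2] - a[2] * b[1],
            a[2] * b[0] - a[0] * b[2],
            a[0] * b[1] - a[1] * b[0])

def dot(a, b):
    return sum(x * y for x, y in zip(a, b))

def displacement_along_normal(fast, slow, origin):
    # Distance of the panel-group origin along the group's normal vector.
    return dot(cross(fast, slow), origin)

assert displacement_along_normal((1, 0, 0), (0, 1, 0), (0, 0, -100.)) == -100.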
Exemplo n.º 39
0
    def query(self, submission_id):
        if self.queueing_system in ["mpi", "lsf"]:
            result = easy_run.fully_buffered(
                command=self.command % (submission_id, submission_id))
        elif self.queueing_system == 'pbs':
            result = easy_run.fully_buffered(command=self.command %
                                             submission_id)
        elif self.queueing_system == 'local':
            import psutil
            try:
                process = psutil.Process(int(submission_id))
            except psutil.NoSuchProcess:
                return "DONE"
            statuses = [
                p.status()
                for p in [process] + process.children(recursive=True)
            ]
            if 'running' in statuses: return "RUN"
            if 'sleeping' in statuses: return "RUN"
            # zombie jobs can be left because the GUI process that forked them is still running
            if len(statuses) == 1 and statuses[0] == 'zombie': return "DONE"
            return ", ".join(statuses)
        elif self.queueing_system == 'slurm' or self.queueing_system == "shifter":
            # The current implementation of the shifter mp method assumes that we're
            # running on NERSC's systems => jobs should be tracked using the _slurm_
            # submission tracker.
            result = easy_run.fully_buffered(command=self.command %
                                             submission_id)
            if len(result.stdout_lines) == 0: return 'UNKWN'
            status = result.stdout_lines[0].strip().rstrip('+')
            statuses = {
                'COMPLETED': 'DONE',
                'COMPLETING': 'RUN',
                'FAILED': 'EXIT',
                'PENDING': 'PEND',
                'PREEMPTED': 'SUSP',
                'RUNNING': 'RUN',
                'SUSPENDED': 'SUSP',
                'STOPPED': 'SUSP',
                'CANCELLED': 'EXIT',
            }
            return statuses.get(status, 'UNKWN')
        elif self.queueing_system == 'htcondor':
            # (copied from the man page)
            # H = on hold, R = running, I = idle (waiting for a machine to execute on), C = completed,
            # X = removed, S = suspended (execution of a running job temporarily suspended on execute node),
            # < = transferring input (or queued to do so), and > = transferring output (or queued to do so).
            statuses = {
                'H': 'HOLD',
                'R': 'RUN',
                'I': 'PEND',
                'C': 'DONE',
                'X': 'DONE',
                'S': 'SUSP',
                '<': 'RUN',
                '>': 'RUN'
            }
            for c in [self.command1, self.command2]:
                result = easy_run.fully_buffered(command=c % submission_id)
                if len(result.stdout_lines) != 1 or len(
                        result.stdout_lines[0]) == 0:
                    continue
                status = result.stdout_lines[0].split()[5]
                return statuses.get(status, 'UNKWN')
            return 'ERR'
        status = "\n".join(result.stdout_lines)
        error = "\n".join(result.stderr_lines)
        if error != "" and "Warning: job being submitted without an AFS token." not in error:
            if "not found" in error:
                return "ERR"
            else:
                return error
        else:
            return status
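
The slurm/shifter branch reduces whatever sacct-style token comes back to one of a few canonical states. Condensed into a standalone helper (assuming, as above, that the raw token may carry a trailing '+'):

def normalize_slurm_status(stdout_lines):
    if len(stdout_lines) == 0:
        return 'UNKWN'
    status = stdout_lines[0].strip().rstrip('+')
    table = {'COMPLETED': 'DONE', 'COMPLETING': 'RUN', 'FAILED': 'EXIT',
             'PENDING': 'PEND', 'PREEMPTED': 'SUSP', 'RUNNING': 'RUN',
             'SUSPENDED': 'SUSP', 'STOPPED': 'SUSP', 'CANCELLED': 'EXIT'}
    return table.get(status, 'UNKWN')

assert normalize_slurm_status(['COMPLETED+']) == 'DONE'
assert normalize_slurm_status([]) == 'UNKWN'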
Exemplo n.º 40
0
def refine_hierarchical(params, merged_scope, combine_phil):
    if params.panel_filter is not None:
        from libtbx import easy_pickle
        print "Filtering out all reflections except those on panels %s" % (
            ", ".join(["%d" % p for p in params.panel_filter]))
        combined_path = "%s_combined_reflections.pickle" % params.tag
        data = easy_pickle.load(combined_path)
        sel = None
        for panel_id in params.panel_filter:
            if sel is None:
                sel = data['panel'] == panel_id
            else:
                sel |= data['panel'] == panel_id
        print "Retaining", len(
            data.select(sel)), "out of", len(data), "reflections"
        easy_pickle.dump(combined_path, data.select(sel))

    for i in range(params.start_at_hierarchy_level,
                   params.refine_to_hierarchy_level + 1):
        if params.rmsd_filter.enable:
            input_name = "filtered"
        else:
            if i == params.start_at_hierarchy_level:
                input_name = "combined"
            else:
                input_name = "refined"

        if params.rmsd_filter.enable:
            command = "cctbx.xfel.filter_experiments_by_rmsd %s %s output.filtered_experiments=%s output.filtered_reflections=%s"
            if i == params.start_at_hierarchy_level:
                command = command % (
                    "%s_combined_experiments.json" % params.tag,
                    "%s_combined_reflections.pickle" % params.tag,
                    "%s_filtered_experiments.json" % params.tag,
                    "%s_filtered_reflections.pickle" % params.tag)
            else:
                command = command % ("%s_refined_experiments_level%d.json" %
                                     (params.tag, i - 1),
                                     "%s_refined_reflections_level%d.pickle" %
                                     (params.tag, i - 1),
                                     "%s_filtered_experiments_level%d.json" %
                                     (params.tag, i - 1),
                                     "%s_filtered_reflections_level%d.pickle" %
                                     (params.tag, i - 1))
            command += " iqr_multiplier=%f" % params.rmsd_filter.iqr_multiplier
            print command
            result = easy_run.fully_buffered(command=command).raise_if_errors()
            result.show_stdout()

        print "Refining at hierarchy level", i
        refine_phil_file = "%s_refine_level%d.phil" % (params.tag, i)
        if i == 0:
            fix_list = ['Tau1']  # fix detector rotz
            if not params.refine_distance:
                fix_list.append('Dist')
            if params.flat_refinement:
                fix_list.extend(['Tau2', 'Tau3'])

            diff_phil = "refinement.parameterisation.detector.fix_list=%s\n" % ",".join(
                fix_list)
            if params.refine_energy:
                diff_phil += " refinement.parameterisation.beam.fix=in_spindle_plane+out_spindle_plane\n"  # allow energy to refine
        else:
            # Note, always need to fix something, so pick a panel group and fix its Tau1 (rotation around Z) always
            if params.flat_refinement and params.flat_refinement_with_distance:
                diff_phil = "refinement.parameterisation.detector.fix_list=Group1Tau1,Tau2,Tau3\n"  # refine distance, rotz and xy translation
                diff_phil += "refinement.parameterisation.detector.constraints.parameter=Dist\n"  # constrain distance to be refined identically for all panels at this hierarchy level
            elif params.flat_refinement:
                diff_phil = "refinement.parameterisation.detector.fix_list=Dist,Group1Tau1,Tau2,Tau3\n"  # refine only rotz and xy translation
            else:
                diff_phil = "refinement.parameterisation.detector.fix_list=Group1Tau1\n"  # refine almost everything

        if i == params.start_at_hierarchy_level:
            command = "dials.refine %s %s_%s_experiments.json %s_%s_reflections.pickle" % (
                refine_phil_file, params.tag, input_name, params.tag,
                input_name)
        else:
            command = "dials.refine %s %s_%s_experiments_level%d.json %s_%s_reflections_level%d.pickle" % (
                refine_phil_file, params.tag, input_name, i - 1, params.tag,
                input_name, i - 1)

        diff_phil += "refinement.parameterisation.detector.hierarchy_level=%d\n" % i

        command += " output.experiments=%s_refined_experiments_level%d.json output.reflections=%s_refined_reflections_level%d.pickle"%( \
          params.tag, i, params.tag, i)

        scope = merged_scope.fetch(parse(diff_phil))
        f = open(refine_phil_file, 'w')
        f.write(refine_scope.fetch_diff(scope).as_str())
        f.close()

        print command
        result = easy_run.fully_buffered(command=command).raise_if_errors()
        result.show_stdout()

    output_geometry(params)
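
Both refinement drivers build the panel filter the same way: an OR-combined boolean mask over the reflection table's 'panel' column. The same reduction in plain python (lists standing in for the flex arrays used above):

def panel_selection(panel_ids, wanted):
    # True where a reflection sits on one of the wanted panels.
    wanted = set(wanted)
    return [p in wanted for p in panel_ids]

assert panel_selection([0, 2, 3, 7], [2, 3]) == [False, True, True, False]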
Exemplo n.º 41
0
    def run_probe_clashscore(self, pdb_string):
        probe_out = easy_run.fully_buffered(self.probe_txt,
                                            stdin_lines=pdb_string)
        if (probe_out.return_code != 0):
            raise RuntimeError("Probe crashed - dumping stderr:\n%s" %
                               "\n".join(probe_out.stderr_lines))
        probe_unformatted = probe_out.stdout_lines
        printable_probe_out = easy_run.fully_buffered(self.full_probe_txt,
                                                      stdin_lines=pdb_string)
        self.probe_unformatted = "\n".join(printable_probe_out.stdout_lines)

        temp = self.process_raw_probe_output(probe_unformatted)

        self.n_clashes = len(temp)
        if self.b_factor_cutoff is not None:
            clashes_b_cutoff = 0
            for clash_obj in temp:
                if clash_obj.max_b_factor < self.b_factor_cutoff:
                    clashes_b_cutoff += 1
            self.n_clashes_b_cutoff = clashes_b_cutoff
        used = []
        self.bad_clashes = []
        for clash_obj in sorted(temp):
            test_key = clash_obj.id_str()
            if test_key not in used:
                used.append(test_key)
                self.bad_clashes.append(clash_obj)
        probe_info = easy_run.fully_buffered(
            self.probe_atom_txt,
            stdin_lines=pdb_string)  #.raise_if_errors().stdout_lines
        err = probe_info.format_errors_if_any()
        if err is not None and err.find("No atom data in input.") > -1:
            self.clashscore = None
            self.clashscore_b_cutoff = None
            return
        #if (len(probe_info) == 0) :
        #  raise RuntimeError("Empty PROBE output.")
        self.n_atoms = 0
        n_atoms = 0
        for line in probe_info.stdout_lines:
            try:
                dump, n_atoms = line.split(":")
            except KeyboardInterrupt:
                raise
            except ValueError:
                pass  # something else (different from expected) got into output
        self.n_atoms = int(n_atoms)
        self.natoms_b_cutoff = None
        if self.probe_atom_b_factor is not None:
            probe_info_b_factor = easy_run.fully_buffered(
                self.probe_atom_b_factor,
                stdin_lines=pdb_string).raise_if_errors().stdout_lines
            for line in probe_info_b_factor:
                dump_b, natoms_b_cutoff = line.split(":")
            self.natoms_b_cutoff = int(natoms_b_cutoff)
        if self.n_atoms == 0:
            clashscore = 0.0
        else:
            clashscore = (self.n_clashes * 1000.) / self.n_atoms
        self.clashscore = clashscore
        clashscore_b_cutoff = None
        if self.natoms_b_cutoff is not None and self.natoms_b_cutoff == 0:
            clashscore_b_cutoff = 0.0
        elif self.natoms_b_cutoff is not None:
            clashscore_b_cutoff = \
              (self.n_clashes_b_cutoff * 1000.) / self.natoms_b_cutoff
        self.clashscore_b_cutoff = clashscore_b_cutoff
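
bad_clashes is built by keeping only the first occurrence of each clash key from the sorted list. The de-duplication on its own, with a set for the membership test instead of the linear scan over `used` (a sketch, not the original helper):

def dedupe_sorted(items, key):
    seen = set()
    kept = []
    for item in sorted(items):
        k = key(item)
        if k not in seen:
            seen.add(k)
            kept.append(item)
    return kept

assert dedupe_sorted([3, 1, 2, 1], key=lambda x: x) == [1, 2, 3]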
Exemplo n.º 42
0
def check_and_add_hydrogen(pdb_hierarchy=None,
                           file_name=None,
                           nuclear=False,
                           keep_hydrogens=True,
                           verbose=False,
                           model_number=0,
                           n_hydrogen_cut_off=0,
                           time_limit=120,
                           allow_multiple_models=True,
                           crystal_symmetry=None,
                           do_flips=False,
                           log=None):
    """
  If no hydrogens present, force addition for clashscore calculation.
  Use REDUCE to add the hydrogen atoms.

  Args:
    pdb_hierarchy : pdb hierarchy
    file_name (str): pdb file name
    nuclear (bool): When True use nuclear cloud x-H distances and vdW radii,
      otherwise use electron cloud x-H distances and vdW radii
    keep_hydrogens (bool): when True, if there are hydrogen atoms, keep them
    verbose (bool): verbosity of printout
    model_number (int): the index of the model to use
    time_limit (int): limit the time it takes to add hydrogen atoms
    n_hydrogen_cut_off (int): when the number of hydrogen atoms is at or below
      n_hydrogen_cut_off, force keep_hydrogens to False so that hydrogens are
      (re)added
    allow_multiple_models (bool): allow inputs that contain more than one model
    crystal_symmetry : must provide crystal symmetry when using pdb_hierarchy

  Returns:
    (str): PDB string
    (bool): True when PDB string was updated
  """
    if not log: log = sys.stdout
    if file_name:
        pdb_inp = iotbx.pdb.input(file_name=file_name)
        pdb_hierarchy = pdb_inp.construct_hierarchy()
        cryst_sym = pdb_inp.crystal_symmetry()
    elif not allow_multiple_models:
        assert crystal_symmetry
        cryst_sym = crystal_symmetry
    else:
        cryst_sym = None
    assert pdb_hierarchy
    assert model_number < len(pdb_hierarchy.models())
    models = pdb_hierarchy.models()
    if (len(models) > 1) and (not allow_multiple_models):
        raise Sorry(
            "When using CCTBX clashscore, provide only a single model.")
    model = models[model_number]
    r = iotbx.pdb.hierarchy.root()
    mdc = model.detached_copy()
    r.append_model(mdc)
    if keep_hydrogens:
        elements = r.atoms().extract_element()
        # strangely the elements can have a space when coming from phenix.clashscore
        # but no space when coming from phenix.molprobity
        h_count = elements.count('H')
        if h_count <= n_hydrogen_cut_off: h_count += elements.count(' H')
        if h_count <= n_hydrogen_cut_off: h_count += elements.count('D')
        if h_count <= n_hydrogen_cut_off: h_count += elements.count(' D')
        if h_count > n_hydrogen_cut_off:
            has_hd = True
        else:
            has_hd = False
        if not has_hd:
            if verbose:
                print >> log, "\nNo H/D atoms detected - forcing hydrogen addition!\n"
            keep_hydrogens = False
    import libtbx.load_env
    has_reduce = libtbx.env.has_module(name="reduce")
    # add hydrogen if needed
    if has_reduce and (not keep_hydrogens):
        # set reduce running parameters
        build = "phenix.reduce -oh -his -flip -keep -allalt -limit{}"
        if not do_flips: build += " -pen9999"
        if nuclear:
            build += " -nuc -"
        else:
            build += " -"
        build = build.format(time_limit)
        trim = "phenix.reduce -quiet -trim -"
        stdin_lines = r.as_pdb_string(cryst_sym)
        clean_out = easy_run.fully_buffered(trim, stdin_lines=stdin_lines)
        if (clean_out.return_code != 0):
            msg_str = "Reduce crashed with command '%s' - dumping stderr:\n%s"
            raise Sorry(msg_str % (trim, "\n".join(clean_out.stderr_lines)))
        build_out = easy_run.fully_buffered(build,
                                            stdin_lines=clean_out.stdout_lines)
        if (build_out.return_code != 0):
            msg_str = "Reduce crashed with command '%s' - dumping stderr:\n%s"
            raise Sorry(msg_str % (build, "\n".join(build_out.stderr_lines)))
        reduce_str = '\n'.join(build_out.stdout_lines)
        return reduce_str, True
    else:
        if not has_reduce:
            msg = 'phenix.reduce could not be detected on your system.\n'
            msg += 'Cannot add hydrogen to PDB file'
            print >> log, msg
        return r.as_pdb_string(cryst_sym), False
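
As the comment above notes, element fields may or may not carry a leading space depending on which tool produced them, so the H/D count has to try both spellings. A compact, hedged version of that check (list-based; the real code uses a flex array):

def has_hydrogens(elements, cutoff=0):
    # Count H and D in both ' X' and 'X' spellings of the element field.
    count = sum(elements.count(e) for e in ('H', ' H', 'D', ' D'))
    return count > cutoff

assert has_hydrogens(['C', ' H', 'H'])
assert not has_hydrogens(['C', 'O'])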
Exemplo n.º 43
0
import inspect
import os
import re
import sys
import warnings

from libtbx import cpp_function_name

symbol_not_found_pat = re.compile(
  r"[Ss]ymbol[ ]not[ ]found: \s* (\w+) $", re.X | re.M | re.S)

python_libstdcxx_so = None
if (sys.platform.startswith("linux")):
  from libtbx import easy_run
  for line in easy_run.fully_buffered(
                command='/usr/bin/ldd "%s"' % sys.executable).stdout_lines:
    if (line.strip().startswith("libstdc++.so")):
      python_libstdcxx_so = line.split()[0]
      break

def import_ext(name, optional=False):
  components = name.split(".")
  if (len(components) > 1):
    __import__(".".join(components[:-1]))
  previous_dlopenflags = None
  if (sys.platform.startswith("linux")) :
    previous_dlopenflags = sys.getdlopenflags()
    sys.setdlopenflags(0x100|0x2)
  try: mod = __import__(name)
  except ImportError as e:
    if (optional): return None
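
The 0x100|0x2 passed to sys.setdlopenflags is RTLD_GLOBAL|RTLD_NOW on Linux: the extension's C++ symbols are made visible process-wide and resolved eagerly at import. A minimal sketch of the save/restore pattern (the truncated code above performs the import between the two calls):

import sys

if sys.platform.startswith("linux"):
    previous = sys.getdlopenflags()
    sys.setdlopenflags(0x100 | 0x2)  # RTLD_GLOBAL | RTLD_NOW
    try:
        pass  # __import__(name) of the extension module would go here
    finally:
        sys.setdlopenflags(previous)  # restore the interpreter default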
Exemplo n.º 44
0
def exercise():
    from mmtbx.command_line import cif_as_mtz
    from iotbx import file_reader
    cif_in = """\
data_r2etdsf
#
_cell.entry_id      2etd
_cell.length_a      80.4540
_cell.length_b      85.2590
_cell.length_c      53.3970
_cell.angle_alpha   90.0000
_cell.angle_beta    90.0000
_cell.angle_gamma   90.0000
#
_diffrn.id                  1
_diffrn.crystal_id          1
_diffrn.ambient_temp        ?
_diffrn.crystal_treatment   ?
#
loop_
_diffrn_radiation_wavelength.id
_diffrn_radiation_wavelength.wavelength
1 1.0163
2 0.9797
3 0.9796
#_entry.id   2etd
#
_exptl_crystal.id   1
#
_reflns_scale.group_code   1
#
_symmetry.entry_id               2etd
_symmetry.space_group_name_H-M   'C 2 2 2'
#
#
loop_
_refln.crystal_id
_refln.wavelength_id
_refln.scale_group_code
_refln.index_h
_refln.index_k
_refln.index_l
_refln.status
_refln.F_meas_au
_refln.F_meas_sigma_au
_refln.F_calc
_refln.phase_calc
1 1 1    0    2    8 o    149.0    3.1      558.1     0.0
1 1 1    0    2    9 o    217.9    4.3      119.3   180.0
1 1 1    0    2   10 o     33.3    5.3      340.6   180.0
1 1 1    0    2   11 o    389.0   10.0      692.8     0.0
1 1 1    0    2   12 o    143.5    3.7       54.5   180.0
1 1 1    0    2   13 f    801.4   36.0     1185.6     0.0
1 1 1    0    2   14 o    351.5   11.8      403.6   360.0
1 1 1    0    2   15 o    439.4   17.2      644.6   180.0
1 1 1    0    2   16 o     39.5    6.8      161.6   180.0
#END
data_r2etdAsf
#
_cell.entry_id      2etd
_cell.length_a      80.454
_cell.length_b      85.259
_cell.length_c      53.397
_cell.angle_alpha   90.000
_cell.angle_beta    90.000
_cell.angle_gamma   90.000
#
_diffrn.id                  1
_diffrn.crystal_id          1
_diffrn.ambient_temp        ?
_diffrn.crystal_treatment   ?
_diffrn.details
;   unmerged original index intensities of data set used for refinement and phasing.
;
#
_entry.id   2etd
#
_exptl_crystal.id   1
#
_reflns_scale.group_code   1
#
#
loop_
_refln.crystal_id
_refln.wavelength_id
_refln.scale_group_code
_refln.index_h
_refln.index_k
_refln.index_l
_refln.status
_refln.intensity_meas
_refln.intensity_sigma
1 1 1    0    2   -8  o    1544.6    130.4
1 1 1    0    2    9  o    3450.1    264.9
1 1 1    0   -2    9  o    3243.5    268.5
1 1 1    0   -2   -9  o    3475.4    265.0
1 1 1    0    2   -9  o    3420.1    260.3
1 1 1    0    2   10  o      58.8     31.8
1 1 1    0   -2  -10  o     131.7     50.7
1 1 1    0    2  -10  o      97.9     48.7
1 1 1    0   -2   11  o    9808.3    953.5
1 1 1    0   -2  -11  o   11486.1    970.3
1 1 1    0    2  -11  o   11278.1    967.8
1 1 1    0    2   12  o    1368.9    150.7
1 1 1    0   -2   12  o    1620.9    148.7
1 1 1    0   -2  -12  o    1293.5    147.6
1 1 1    0    2  -12  o    1619.5    155.7
1 1 1    0   -2   13  o   47438.3   4104.1
1 1 1    0    2   14  o    8577.5   1188.0
1 1 1    0   -2   14  o    7996.7   1179.9
1 1 1    0   -2  -14  o    9333.7   1178.1
1 1 1    0    2  -14  o    9642.3   1197.6
1 1 1    0    2   15  o   13577.7   1852.4
1 1 1    0   -2   15  o   14100.9   1852.8
1 1 1    0   -2  -15  o   14184.2   1871.6
1 1 1    0    2   16  o     135.6     76.3
1 1 1    0   -2   16  o     117.0     60.4
#END
data_r2etdBsf
#
_cell.entry_id      2etd
_cell.length_a      80.419
_cell.length_b      85.227
_cell.length_c      53.389
_cell.angle_alpha   90.000
_cell.angle_beta    90.000
_cell.angle_gamma   90.000
#
_diffrn.id                  1
_diffrn.crystal_id          1
_diffrn.ambient_temp        ?
_diffrn.crystal_treatment   ?
_diffrn.details
'   unmerged original index intensities of data set used for phasing.'
#
_entry.id   2etd
#
_exptl_crystal.id   1
#
_reflns_scale.group_code   1
#
loop_
_refln.crystal_id
_refln.wavelength_id
_refln.scale_group_code
_refln.index_h
_refln.index_k
_refln.index_l
_refln.status
_refln.intensity_meas
_refln.intensity_sigma
1 2 1    0    2    8  o    1095.4    110.5
1 2 1    0   -2    8  o    1105.1    117.9
1 2 1    0   -2   -8  o    1055.4    108.6
1 2 1    0    2   -8  o    1105.1    118.0
1 2 1    0    2    9  o    3795.1    366.5
1 2 1    0   -2    9  o    3925.3    371.7
1 2 1    0   -2   -9  o    3726.2    366.9
1 2 1    0    2   -9  o    3899.3    371.8
1 2 1    0    2   10  o      27.0     40.7
1 2 1    0   -2   10  o      80.7     34.8
1 2 1    0   -2  -10  o      18.3     40.3
1 2 1    0    2  -10  o      16.5     45.4
1 2 1    0    2   11  o   10830.4    977.6
1 2 1    0   -2   11  o    9647.1    972.1
1 2 1    0   -2  -11  o   10854.6    983.4
1 2 1    0    2  -11  o   10369.4    976.0
1 2 1    0    2   12  o    1143.7    131.7
1 2 1    0   -2   12  o    1378.0    140.1
1 2 1    0   -2  -12  o    1090.9    134.2
1 2 1    0    2  -12  o    1290.6    135.6
1 2 1    0   -2   13  o   44492.5   4083.0
1 2 1    0    2  -13  o   44931.1   4102.4
1 2 1    0    2   14  o    6646.0    952.7
1 2 1    0   -2   14  o    6516.6    949.1
1 2 1    0   -2  -14  o    7589.5    952.0
1 2 1    0    2  -14  o    7559.7    954.2
1 2 1    0    2   16  o     256.6     68.6
1 2 1    0   -2   16  o     137.1     67.8
1 2 1    0   -2  -16  o     307.6     69.9
1 2 1    0    2  -16  o     265.4     71.5
"""
    open("r2etd-sf.cif", "w").write(cif_in)
    args = ["phenix.cif_as_mtz", "r2etd-sf.cif", "--merge"]
    rc = easy_run.fully_buffered(" ".join(args)).raise_if_errors().return_code
    assert (rc == 0)
    hkl_in = file_reader.any_file("r2etd-sf.mtz")
    l_w = []
    for array in hkl_in.file_server.miller_arrays:
        l_w.append(
            (array.info().label_string(), "%.4f" % array.info().wavelength))
    assert (sorted(l_w) == sorted([('FOBS,SIGFOBS', '1.0163'),
                                   ('FC,PHIFC', '1.0163'),
                                   ('R-free-flags', '1.0163'),
                                   ('I(+),SIGI(+),I(-),SIGI(-)', '1.0163'),
                                   ('I2(+),SIGI2(+),I2(-),SIGI2(-)', '0.9797')
                                   ]))
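
The assertion compares sorted (label, wavelength) pairs so array order in the MTZ does not matter. Extracted into a helper (same iotbx accessors as above; a sketch, not part of the test):

def label_wavelength_pairs(miller_arrays):
    # Pin the wavelength to 4 decimals so float formatting is stable.
    return sorted((a.info().label_string(), "%.4f" % a.info().wavelength)
                  for a in miller_arrays)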
Exemplo n.º 45
0
    def run(self, idx, img):
        if os.path.isfile(self.term_file):
            raise IOTATermination('IOTA_TRACKER: Termination signal received!')
        else:
            # First, parse filepath to create Mosflm template
            directory = os.path.dirname(img)
            filepath = os.path.basename(img).split('.')
            fname = filepath[0]
            extension = filepath[1]
            if '_' in fname:
                suffix = fname.split('_')[-1]
            elif '-' in fname:
                suffix = fname.split('-')[-1]
            elif '.' in fname:
                suffix = fname.split('.')[-1]
            img_number = int(''.join(n if n.isdigit() else '' for n in suffix))
            prefix = fname.replace(suffix, '')
            n_suffix = ''.join("#" if c.isdigit() else c for c in suffix)
            template = '{}{}.{}'.format(prefix, n_suffix, extension)

            # Create autoindex.com w/ Mosflm script
            # Write to temporary file and change permissions to run
            autoindex = [
                '#! /bin/tcsh -fe', 'ipmosflm << eof-ipmosflm',
                'NEWMATRIX {0}.mat'.format(fname),
                'DIRECTORY {}'.format(directory),
                'TEMPLATE {}'.format(template),
                'AUTOINDEX DPS THRESH 0.1 IMAGE {} PHI 0 0.01'.format(
                    img_number), 'GO', 'eof-ipmosflm'
            ]
            autoindex_string = '\n'.join(autoindex)
            autoindex_filename = 'autoindex_{}.com'.format(idx)

            with open(autoindex_filename, 'w') as af:
                af.write(autoindex_string)
            os.chmod(autoindex_filename, 0o755)

            # Run Mosflm autoindexing
            command = './{}'.format(autoindex_filename)
            out = easy_run.fully_buffered(command, join_stdout_stderr=True)

            # Scrub text output
            final_spots = [
                l for l in out.stdout_lines if 'spots written for image' in l
            ]
            final_cell_line = [
                l for l in out.stdout_lines if 'Final cell' in l
            ]
            final_sg_line = [l for l in out.stdout_lines if 'space group' in l]

            if final_spots != []:
                spots = final_spots[0].rsplit()[0]
            else:
                spots = 0
            if final_cell_line != []:
                cell = final_cell_line[0].replace(
                    'Final cell (after refinement) is', '').rsplit()
            else:
                cell = None
            if final_sg_line != []:
                sg = final_sg_line[0].rsplit()[6]
            else:
                sg = None

            # Temp file cleanup
            try:
                os.remove('{}.mat'.format(fname))
            except Exception:
                pass
            try:
                os.remove('{}.spt'.format(prefix[:-1]))
            except Exception:
                pass
            try:
                os.remove('SUMMARY')
            except Exception:
                pass
            try:
                os.remove(autoindex_filename)
            except Exception:
                pass

            return [idx, spots, img, sg, cell]
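
The run method turns an image filename into a Mosflm template by masking the digits of the trailing image number with '#'. The same derivation as a standalone function (os.path.splitext used here instead of the manual split; purely illustrative):

import os

def mosflm_template(filename):
    fname, ext = os.path.splitext(os.path.basename(filename))
    for sep in ('_', '-', '.'):
        if sep in fname:
            suffix = fname.split(sep)[-1]
            break
    else:
        suffix = fname
    number = int(''.join(c for c in suffix if c.isdigit()))
    masked = ''.join('#' if c.isdigit() else c for c in suffix)
    return fname[:len(fname) - len(suffix)] + masked + ext, number

assert mosflm_template('run_0001.img') == ('run_####.img', 1)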
Exemplo n.º 46
0
def test_sacla_h5(dials_data, run_in_tmpdir, use_mpi, in_memory=False):
    # Only allow MPI tests if we've got MPI capabilities
    if use_mpi:
        pytest.importorskip("mpi4py")

    # Check the data files for this test exist
    sacla_path = dials_data("image_examples")
    image_path = os.path.join(sacla_path, "SACLA-MPCCD-run266702-0-subset.h5")
    assert os.path.isfile(image_path)

    geometry_path = os.path.join(
        sacla_path,
        "SACLA-MPCCD-run266702-0-subset-refined_experiments_level1.json")
    assert os.path.isfile(geometry_path)

    # Write the .phil configuration to a file
    with open("process_sacla.phil", "w") as f:
        f.write(sacla_phil % geometry_path)

    # Call dials.stills_process
    if use_mpi:
        command = [
            "mpirun",
            "-n",
            "4",
            "dials.stills_process",
            "mp.method=mpi mp.composite_stride=4 output.logging_dir=.",
        ]
    else:
        command = ["dials.stills_process"]
    command += [image_path, "process_sacla.phil"]
    result = easy_run.fully_buffered(command).raise_if_errors()
    result.show_stdout()

    def test_refl_table(result_filename, ranges):
        table = flex.reflection_table.from_file(result_filename)
        for expt_id, n_refls in enumerate(ranges):
            subset = table.select(table["id"] == expt_id)
            assert len(subset) in n_refls, (result_filename, expt_id,
                                            len(subset))
        assert "id" in table
        assert set(table["id"]) == {0, 1, 2, 3}

    # large ranges to handle platform-specific differences
    test_refl_table(
        "idx-0000_integrated.refl",
        [
            list(range(140, 160)),
            list(range(575, 600)),
            list(range(420, 445)),
            list(range(485, 510)),
        ],
    )

    test_refl_table(
        "idx-0000_coset6.refl",
        [
            list(range(145, 160)),
            list(range(545, 570)),
            list(range(430, 455)),
            list(range(490, 515)),
        ],
    )
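
test_refl_table accepts a range of reflection counts per experiment to absorb platform-to-platform differences. The core check, separated out (hypothetical helper, list-based):

def check_counts(counts, allowed_ranges):
    # Each experiment's count must fall inside its platform-tolerant window.
    for expt_id, (n, allowed) in enumerate(zip(counts, allowed_ranges)):
        assert n in allowed, (expt_id, n)

check_counts([150, 580], [range(140, 160), range(575, 600)])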
Exemplo n.º 47
0
def run_ksdssp_direct(pdb_str):
  exe_path = get_ksdssp_exe_path()
  ksdssp_out = easy_run.fully_buffered(command=exe_path, stdin_lines=pdb_str)
  return ( ksdssp_out.stdout_lines, ksdssp_out.stderr_lines )
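
run_ksdssp_direct is the smallest example of the pattern used throughout these snippets: pipe a text blob to a program's stdin and collect its stdout/stderr lines. A rough plain-subprocess stand-in for easy_run.fully_buffered(command, stdin_lines=...) (not the libtbx implementation):

import subprocess

def run_with_stdin(command, stdin_text):
    proc = subprocess.Popen(command, shell=True, universal_newlines=True,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate(stdin_text)
    return out.splitlines(), err.splitlines()

# e.g. on a POSIX system: run_with_stdin("cat", "abc\n") -> (["abc"], [])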
Exemplo n.º 48
0
def build_run(setup_cmd, ld_preload_flag, n_scatt, n_refl, build_cmd,
              check_max_a_b):
    if (op.isfile("a.out")):
        os.remove("a.out")
    assert not op.isfile("a.out")
    print(build_cmd)
    buffers = easy_run.fully_buffered(command=build_cmd)
    msg = buffers.format_errors_if_any()
    if (msg is not None):
        if (0):
            print(build_cmd)
            print()
            print(msg)
            print()
            STOP()
        return None
    assert op.isfile("a.out")
    run_cmd = setup_cmd
    if (ld_preload_flag):
        run_cmd += 'env LD_PRELOAD='\
        '"/net/marbles/raid1/rwgk/dist/opt_resources/linux64/libimf.so:"'\
        '"/net/marbles/raid1/rwgk/dist/opt_resources/linux64/libirc.so" '
    utimes = []
    run_cmd += '/usr/bin/time -p ./a.out'

    def run_once():
        buffers = easy_run.fully_buffered(command=run_cmd)
        if (len(buffers.stderr_lines) != 3):
            print("v" * 79)
            print("\n".join(buffers.stderr_lines))
            print("^" * 79)
            raise RuntimeError("Unexpected number of output lines"
                               " (3 expected; actual output shown above).")
        if (n_scatt == 0):
            pass
        elif (n_scatt <= 10 and n_refl <= 100):
            assert len(buffers.stdout_lines) == n_scatt + n_refl
        else:
            assert len(buffers.stdout_lines) == 1
            max_a, max_b = [float(s) for s in buffers.stdout_lines[0].split()]
        if (check_max_a_b):
            if (n_scatt == 2000 and n_refl == 20000):
                assert approx_equal(max_a, 35.047157, eps=1e-4)
                assert approx_equal(max_b, 25.212738, eps=1e-4)
            elif (n_scatt == 100 and n_refl == 1000):
                assert approx_equal(max_a, 4.493645, eps=1e-4)
                assert approx_equal(max_b, 10.515532, eps=1e-4)
            elif (n_scatt <= 10 and n_refl <= 100):
                if (libtbx.env.has_module(name="cctbx")):
                    compare_with_cctbx_structure_factors(
                        n_scatt=n_scatt,
                        n_refl=n_refl,
                        output_lines=buffers.stdout_lines)
            else:
                raise RuntimeError(max_a, max_b)
        utime = float(buffers.stderr_lines[1].split()[1])
        utimes.append(utime)
        print("sample utime: %.2f" % utime)
        sys.stdout.flush()

    for _ in range(8):
        run_once()
    return min(utimes)
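
build_run reads the user time from the stderr of /usr/bin/time -p, which emits exactly three lines (real/user/sys), and keeps the minimum over eight runs since timing noise is one-sided. The parsing step on its own (a slightly more defensive sketch than the fixed-index lookup above):

def parse_user_time(stderr_lines):
    # `/usr/bin/time -p` writes "real X", "user X", "sys X" to stderr.
    for line in stderr_lines:
        fields = line.split()
        if len(fields) == 2 and fields[0] == 'user':
            return float(fields[1])
    raise RuntimeError('no "user" line in time output')

assert parse_user_time(['real 1.00', 'user 0.75', 'sys 0.10']) == 0.75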
Exemplo n.º 49
0
def exercise_anomalous_isomorphous_difference_map():
    pdb_1 = """\
CRYST1   32.247   37.398   33.613  90.00  90.00  90.00 P 1
HETATM 1983  PG  AGS A 340       5.029   2.780   3.852  1.00 60.68           P
HETATM 1984  S1G AGS A 340       3.762   2.077   2.900  1.00 64.20           S
HETATM 1985  O2G AGS A 340       6.396   1.935   3.849  1.00 61.32           O
HETATM 1986  O3G AGS A 340       4.528   2.885   5.348  1.00 57.78           O
HETATM 1987  PB  AGS A 340       4.068   5.371   3.258  1.00 53.67           P
HETATM 1988  O1B AGS A 340       3.533   5.492   1.878  1.00 52.45           O
HETATM 1989  O2B AGS A 340       2.939   5.014   4.243  1.00 53.83           O
HETATM 1990  O3B AGS A 340       5.255   4.278   3.263  1.00 56.71           O
HETATM 1991  PA  AGS A 340       5.581   6.888   5.132  1.00 42.66           P
HETATM 1992  O1A AGS A 340       4.625   6.671   6.259  1.00 43.69           O
HETATM 1993  O2A AGS A 340       6.834   5.959   5.235  1.00 45.93           O
HETATM 1994  O3A AGS A 340       4.800   6.740   3.720  1.00 48.66           O
HETATM 1995  O5' AGS A 340       6.098   8.409   5.107  1.00 36.10           O
HETATM 1996  C5' AGS A 340       7.059   8.753   4.138  1.00 31.02           C
HETATM 1997  C4' AGS A 340       7.848   9.969   4.591  1.00 29.79           C
HETATM 1998  O4' AGS A 340       6.947  11.004   4.917  1.00 30.10           O
HETATM 1999  C3' AGS A 340       8.682   9.711   5.831  1.00 30.12           C
HETATM 2000  O3' AGS A 340       9.998   9.322   5.503  1.00 29.10           O
HETATM 2001  C2' AGS A 340       8.619  11.034   6.578  1.00 30.77           C
HETATM 2002  O2' AGS A 340       9.677  11.886   6.190  1.00 29.37           O
HETATM 2003  C1' AGS A 340       7.319  11.667   6.111  1.00 30.26           C
HETATM 2004  N9  AGS A 340       6.261  11.498   7.129  1.00 27.43           N
HETATM 2005  C8  AGS A 340       5.454  10.400   7.302  1.00 27.85           C
HETATM 2006  N7  AGS A 340       4.609  10.631   8.330  1.00 29.03           N
HETATM 2007  C5  AGS A 340       4.862  11.863   8.817  1.00 26.18           C
HETATM 2008  C6  AGS A 340       4.295  12.588   9.859  1.00 26.06           C
HETATM 2009  N6  AGS A 340       3.275  12.079  10.559  1.00 28.35           N
HETATM 2010  N1  AGS A 340       4.767  13.859  10.138  1.00 29.55           N
HETATM 2011  C2  AGS A 340       5.794  14.401   9.385  1.00 29.19           C
HETATM 2012  N3  AGS A 340       6.356  13.672   8.351  1.00 28.57           N
HETATM 2013  C4  AGS A 340       5.896  12.422   8.071  1.00 25.89           C
HETATM 2014 MN    MN A 341       2.683   3.335   5.745  1.00 47.43          MN
"""
    pdb_2 = """\
CRYST1   32.247   37.398   33.613  90.00  90.00  90.00 P 1
HETATM 1983  PG  AGS A 340       5.029   2.780   3.852  1.00 60.68           P
HETATM 1984  S1G AGS A 340       3.762   2.077   2.900  1.00 64.20           S
HETATM 1985  O2G AGS A 340       6.396   1.935   3.849  1.00 61.32           O
HETATM 1986  O3G AGS A 340       4.528   2.885   5.348  1.00 57.78           O
HETATM 1987  PB  AGS A 340       4.068   5.371   3.258  1.00 53.67           P
HETATM 1988  O1B AGS A 340       3.533   5.492   1.878  1.00 52.45           O
HETATM 1989  O2B AGS A 340       2.939   5.014   4.243  1.00 53.83           O
HETATM 1990  O3B AGS A 340       5.255   4.278   3.263  1.00 56.71           O
HETATM 1991  PA  AGS A 340       5.581   6.888   5.132  1.00 42.66           P
HETATM 1992  O1A AGS A 340       4.625   6.671   6.259  1.00 43.69           O
HETATM 1993  O2A AGS A 340       6.834   5.959   5.235  1.00 45.93           O
HETATM 1994  O3A AGS A 340       4.800   6.740   3.720  1.00 48.66           O
HETATM 1995  O5' AGS A 340       6.098   8.409   5.107  1.00 36.10           O
HETATM 1996  C5' AGS A 340       7.059   8.753   4.138  1.00 31.02           C
HETATM 1997  C4' AGS A 340       7.848   9.969   4.591  1.00 29.79           C
HETATM 1998  O4' AGS A 340       6.947  11.004   4.917  1.00 30.10           O
HETATM 1999  C3' AGS A 340       8.682   9.711   5.831  1.00 30.12           C
HETATM 2000  O3' AGS A 340       9.998   9.322   5.503  1.00 29.10           O
HETATM 2001  C2' AGS A 340       8.619  11.034   6.578  1.00 30.77           C
HETATM 2002  O2' AGS A 340       9.677  11.886   6.190  1.00 29.37           O
HETATM 2003  C1' AGS A 340       7.319  11.667   6.111  1.00 30.26           C
HETATM 2004  N9  AGS A 340       6.261  11.498   7.129  1.00 27.43           N
HETATM 2005  C8  AGS A 340       5.454  10.400   7.302  1.00 27.85           C
HETATM 2006  N7  AGS A 340       4.609  10.631   8.330  1.00 29.03           N
HETATM 2007  C5  AGS A 340       4.862  11.863   8.817  1.00 26.18           C
HETATM 2008  C6  AGS A 340       4.295  12.588   9.859  1.00 26.06           C
HETATM 2009  N6  AGS A 340       3.275  12.079  10.559  1.00 28.35           N
HETATM 2010  N1  AGS A 340       4.767  13.859  10.138  1.00 29.55           N
HETATM 2011  C2  AGS A 340       5.794  14.401   9.385  1.00 29.19           C
HETATM 2012  N3  AGS A 340       6.356  13.672   8.351  1.00 28.57           N
HETATM 2013  C4  AGS A 340       5.896  12.422   8.071  1.00 25.89           C
HETATM 2014 MN    MN A 341       1.319   3.281   7.076  0.50 47.43          MN
HETATM   60  C   ACT     1       2.759   8.395   7.862  1.00 18.56           C
HETATM   61  O   ACT     1       3.812   8.507   7.193  1.00 19.26           O
HETATM   62  OXT ACT     1       2.218   9.388   8.383  1.00 18.36           O
HETATM   63  CH3 ACT     1       2.127   7.078   8.030  1.00 18.08           C
"""
    open("tst_anom_iso_diff_1.pdb", "w").write(pdb_1)
    open("tst_anom_iso_diff_2.pdb", "w").write(pdb_2)
    base_args = [
        "phenix.fmodel",
        "high_resolution=1.5",
        "type=real",
        "label=F",
        "r_free_flags_fraction=0.1",
        "wavelength=1.77",
        "add_random_error_to_amplitudes_percent=5",
    ]
    args_1 = base_args + [
        "tst_anom_iso_diff_1.pdb", "output.file_name=tst_anom_iso_diff_1.mtz"
    ]
    args_2 = base_args + [
        "tst_anom_iso_diff_2.pdb", "output.file_name=tst_anom_iso_diff_2.mtz"
    ]
    print(" ".join(args_1))
    rc = easy_run.fully_buffered(
        " ".join(args_1)).raise_if_errors().return_code
    assert (rc == 0)
    print(" ".join(args_2))
    rc = easy_run.fully_buffered(
        " ".join(args_2)).raise_if_errors().return_code
    assert (rc == 0)
    base_args = [
        "phenix.fobs_minus_fobs_map",
        "f_obs_1_file=tst_anom_iso_diff_2.mtz",
        "f_obs_2_file=tst_anom_iso_diff_1.mtz",
        "tst_anom_iso_diff_2.pdb",
        "omit_selection=\"element MN\"",
    ]
    args_1 = base_args + [
        "output_file=tst_anom_iso_diff_map_coeffs.mtz",
        "anomalous=True",
    ]
    args_2 = base_args + [
        "output_file=tst_anom_iso_diff_map_coeffs_control.mtz"
    ]
    print(" ".join(args_1))
    rc = easy_run.fully_buffered(
        " ".join(args_1)).raise_if_errors().return_code
    assert (rc == 0)
    print(" ".join(args_2))
    rc = easy_run.fully_buffered(
        " ".join(args_2)).raise_if_errors().return_code
    assert (rc == 0)
    # The input structures differ in the position of the MN ion and in the
    # presence of an acetate ion in the second model.  I am calculating two
    # isomorphous difference maps: one using the anomalous differences of each
    # dataset, and a control using the merged amplitudes. The MN should be
    # prominent in both, and by far the strongest feature in the anomalous map,
    # but the ACT should only have density in the control map.
    from iotbx import file_reader
    import iotbx.pdb.hierarchy
    pdb_in = iotbx.pdb.hierarchy.input(pdb_string=pdb_2)
    xrs = pdb_in.input.xray_structure_simple()
    mtz_1 = file_reader.any_file("tst_anom_iso_diff_map_coeffs.mtz")
    map_1 = mtz_1.file_server.miller_arrays[0].fft_map(
        resolution_factor=0.25).apply_sigma_scaling().real_map_unpadded()
    mtz_2 = file_reader.any_file("tst_anom_iso_diff_map_coeffs_control.mtz")
    map_2 = mtz_2.file_server.miller_arrays[0].fft_map(
        resolution_factor=0.25).apply_sigma_scaling().real_map_unpadded()
    sites_frac = xrs.sites_frac()
    anom_max = mn_anom = 0
    for i_seq, atom in enumerate(pdb_in.hierarchy.atoms()):
        site_frac = sites_frac[i_seq]
        anom_diff = map_1.eight_point_interpolation(site_frac)
        fobs_diff = map_2.eight_point_interpolation(site_frac)
        labels = atom.fetch_labels()
        if (labels.resname.strip() == "MN"):
            assert (anom_diff > 10) and (fobs_diff > 5)
            mn_anom = anom_diff
        elif (labels.resname.strip() == "ACT"):
            assert (anom_diff < 3) and (fobs_diff > 5)
        if (anom_diff > anom_max):
            anom_max = anom_diff
    assert (anom_max == mn_anom)
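
The final assertions say the Mn site must carry the global maximum of the anomalous difference map. Reduced to its logic over plain {label: value} pairs (illustrative only; the real test interpolates map values at atomic sites):

def strongest_site(values_by_label):
    label = max(values_by_label, key=values_by_label.get)
    return label, values_by_label[label]

assert strongest_site({'MN': 12.5, 'ACT': 1.2, 'PG': 3.0})[0] == 'MN'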
Exemplo n.º 50
0
def plot_multirun_stats(runs,
                        run_numbers,
                        d_min,
                        n_multiples=2,
                        ratio_cutoff=1,
                        n_strong_cutoff=40,
                        i_sigi_cutoff=1,
                        run_tags=[],
                        run_statuses=[],
                        minimalist=False,
                        interactive=False,
                        easy_run=False,
                        compress_runs=True,
                        xsize=30,
                        ysize=10,
                        high_vis=False,
                        title=None):
    tset = flex.double()
    two_theta_low_set = flex.double()
    two_theta_high_set = flex.double()
    nset = flex.int()
    resolutions_set = flex.double()
    n_lattices = flex.int()
    boundaries = []
    lengths = []
    runs_with_data = []
    offset = 0
    for idx in xrange(len(runs)):
        r = runs[idx]
        if len(r[0]) > 0:
            if compress_runs:
                tslice = r[0] - r[0][0] + offset
                offset += (r[0][-1] - r[0][0] + 1 / 120.)
            else:
                tslice = r[0]
            last_end = r[0][-1]
            tset.extend(tslice)
            two_theta_low_set.extend(r[1])
            two_theta_high_set.extend(r[2])
            nset.extend(r[3])
            resolutions_set.extend(r[4])
            n_lattices.extend(r[5])
            boundaries.append(tslice[0])
            boundaries.append(tslice[-1])
            lengths.append(len(tslice))
            runs_with_data.append(run_numbers[idx])
        else:
            boundaries.extend([None] * 2)
    stats_tuple = get_run_stats(tset,
                                two_theta_low_set,
                                two_theta_high_set,
                                nset,
                                resolutions_set,
                                n_lattices,
                                tuple(boundaries),
                                tuple(lengths),
                                runs_with_data,
                                n_multiples=n_multiples,
                                ratio_cutoff=ratio_cutoff,
                                n_strong_cutoff=n_strong_cutoff,
                                i_sigi_cutoff=i_sigi_cutoff,
                                d_min=d_min)
    if easy_run:
        from libtbx import easy_run, easy_pickle
        easy_pickle.dump(
            "plot_run_stats_tmp.pickle",
            (stats_tuple, d_min, n_multiples, run_tags, run_statuses,
             minimalist, interactive, xsize, ysize, high_vis, title))
        result = easy_run.fully_buffered(
            command=
            "cctbx.xfel.plot_run_stats_from_stats_pickle plot_run_stats_tmp.pickle"
        )
        try:
            png = result.stdout_lines[-1]
            if png == "None":
                return None
        except Exception:
            return None
    else:
        png = plot_run_stats(stats_tuple,
                             d_min,
                             n_multiples=n_multiples,
                             run_tags=run_tags,
                             run_statuses=run_statuses,
                             minimalist=minimalist,
                             interactive=interactive,
                             xsize=xsize,
                             ysize=ysize,
                             high_vis=high_vis,
                             title=title)
    return png
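
With compress_runs=True the per-run timestamp arrays are shifted so each run starts where the previous one ended, giving one continuous x-axis. The offset bookkeeping in plain python (lists instead of flex.double; the 1/120 s gap mirrors the code above):

def compress_timestamps(runs):
    out = []
    offset = 0.0
    for ts in runs:
        if len(ts) == 0:
            continue
        shifted = [t - ts[0] + offset for t in ts]
        out.extend(shifted)
        offset = shifted[-1] + 1. / 120  # small gap between runs
    return out

out = compress_timestamps([[10., 11.], [100., 102.]])
assert out[:2] == [0.0, 1.0]
assert abs(out[2] - (1.0 + 1.0 / 120)) < 1e-12
assert abs(out[3] - (3.0 + 1.0 / 120)) < 1e-12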
Exemplo n.º 51
0
def test2():
    """Run scan-varying refinement, comparing RMSD table with expected values.
  This test automates what was manually done periodically and recorded in
  dials_regression/refinement_test_data/centroid/README.txt"""

    dials_regression = libtbx.env.find_in_repositories(
        relative_path="dials_regression", test=os.path.isdir)

    # use the i04_weak_data for this test
    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "centroid")
    experiments_path = os.path.join(data_dir,
                                    "experiments_XPARM_REGULARIZED.json")
    pickle_path = os.path.join(data_dir, "spot_all_xds.pickle")

    for pth in (experiments_path, pickle_path):
        assert os.path.exists(pth)

    # scan-static refinement first to get refined_experiments.json as start point
    cmd1 = "dials.refine " + experiments_path + " " + pickle_path + \
      " reflections_per_degree=50 " + \
      " outlier.algorithm=null close_to_spindle_cutoff=0.05"
    cmd2 = "dials.refine refined_experiments.json " + pickle_path + \
      " scan_varying=true output.history=history.pickle " + \
      " reflections_per_degree=50 " + \
      " outlier.algorithm=null close_to_spindle_cutoff=0.05"

    # work in a temporary directory
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory(suffix="test_dials_refine")
    os.chdir(tmp_dir)
    try:
        print cmd1
        result1 = easy_run.fully_buffered(command=cmd1).raise_if_errors()
        print cmd2
        result2 = easy_run.fully_buffered(command=cmd2).raise_if_errors()
        # load and check results
        import cPickle as pickle
        history = pickle.load(open("history.pickle", "r"))

        expected_rmsds = [(0.088488398, 0.114583571, 0.001460382),
                          (0.080489334, 0.086406517, 0.001284069),
                          (0.078835086, 0.086052630, 0.001195882),
                          (0.077476911, 0.086194611, 0.001161143),
                          (0.076755840, 0.086090630, 0.001157239),
                          (0.076586376, 0.085939462, 0.001155641),
                          (0.076603722, 0.085878953, 0.001155065),
                          (0.076611382, 0.085862959, 0.001154863),
                          (0.076608732, 0.085856935, 0.001154384),
                          (0.076605731, 0.085852271, 0.001153858),
                          (0.076604576, 0.085852318, 0.001153643),
                          (0.076603981, 0.085854175, 0.001153594)]

        assert approx_equal(history['rmsd'], expected_rmsds)

        # check that the used_in_refinement flag got set correctly
        rt = flex.reflection_table.from_pickle('refined.pickle')
        uir = rt.get_flags(rt.flags.used_in_refinement)
        assert uir.count(True) == history['num_reflections'][-1]

    finally:
        os.chdir(cwd)

    print "OK"
    return
Example no. 52
def run_angle(prefix):
    edits = """
refinement {
  geometry_restraints.edits {
    angle {
      action = %s
      atom_selection_1 = %s
      atom_selection_2 = %s
      atom_selection_3 = %s
      angle_ideal = 10
      sigma = 1
    }
  }
}
"""
    sels = {
        "old": ("chain A and resseq 7 and name N",
                "chain A and resseq 7 and name CA",
                "chain A and resseq 7 and name CB"),
        "new": ("chain A and resseq 7 and name N",
                "chain A and resseq 7 and name CA",
                "chain A and resseq 7 and name CG2"),
    }
    cntr = 0
    for action in ["add", "delete", "change"]:
        for kind in ["new", "old"]:
            prefix_ = "%s_angle_%s_%s" % (prefix, kind, action)
            with open("%s.eff" % prefix_, "w") as f:
                f.write(edits % ((action,) + sels[kind]))
            cmd = cmd_base % (prefix, prefix_)
            print(cmd)
            r = easy_run.fully_buffered(cmd)
            if action == "add":
                if kind == "new":
                    cntr += 1
                    assert_lines_in_file(file_name="%s.pdb.geo" % prefix,
                                         lines="""
angle pdb=" N   ILE A   7 "
      pdb=" CA  ILE A   7 "
      pdb=" CG2 ILE A   7 "
    ideal   model   delta    sigma   weight residual
    10.00  143.31 -133.31 1.00e+00 1.00e+00 1.78e+04""")
                elif kind == "old":
                    cntr += 1
                    assert r.stderr_lines[0] == \
                        "Sorry: Some restraints were not added because they are already present."
            if action == "delete":
                cntr += 1
                assert r.stderr_lines[0] == \
                    'Sorry: geometry_restraints.edits.angle.action = delete not implemented.'
            if action == "change":
                if kind == "new":
                    cntr += 1
                    assert r.stderr_lines[0] == \
                        'Sorry: Angle below is not restrained, nothing to change.'
                elif kind == "old":
                    cntr += 1
                    assert_lines_in_file(file_name="%s.pdb.geo" % prefix,
                                         lines="""
angle pdb=" N   ILE A   7 "
      pdb=" CA  ILE A   7 "
      pdb=" CB  ILE A   7 "
    ideal   model   delta    sigma   weight residual
    10.00  109.54  -99.54 1.00e+00 1.00e+00 9.91e+03""")
    assert cntr == 6
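For concreteness, the "add"/"new" iteration above writes this filled-in PHIL block to its .eff file (every value comes straight from the template and the selections; nothing here is invented):

refinement {
  geometry_restraints.edits {
    angle {
      action = add
      atom_selection_1 = chain A and resseq 7 and name N
      atom_selection_2 = chain A and resseq 7 and name CA
      atom_selection_3 = chain A and resseq 7 and name CG2
      angle_ideal = 10
      sigma = 1
    }
  }
}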
Example no. 53
    def submit(self, previous_job=None):
        from xfel.command_line.striping import Script
        from xfel.command_line.cxi_mpi_submit import get_submission_id
        from libtbx import easy_run
        configs_dir = os.path.join(settings_dir, "cfgs")
        identifier_string = self.get_identifier_string()
        target_phil_path = os.path.join(configs_dir,
                                        identifier_string + "_params.phil")
        with open(target_phil_path, 'w') as f:
            if self.task.parameters:
                f.write(self.task.parameters)

        path = get_run_path(self.app.params.output_folder, self.trial,
                            self.rungroup, self.run, self.task)
        os.mkdir(path)

        arguments = """
    mp.queue={}
    mp.nproc={}
    mp.nproc_per_node={}
    mp.method={}
    {}
    mp.use_mpi=False
    striping.results_dir={}
    striping.trial={}
    striping.rungroup={}
    striping.run={}
    {}
    striping.chunk_size=3000
    striping.stripe=False
    striping.dry_run=True
    striping.output_folder={}
    reintegration.integration.lookup.mask={}
    mp.local.include_mp_in_command=False
    """.format(
            self.app.params.mp.queue
            if len(self.app.params.mp.queue) > 0 else None,
            self.app.params.mp.nproc,
            self.app.params.mp.nproc_per_node,
            self.app.params.mp.method,
            '\n'.join([
                'mp.env_script={}'.format(p)
                for p in self.app.params.mp.env_script if p
            ]),
            self.app.params.output_folder,
            self.trial.trial,
            self.rungroup.id,
            self.run.run,
            target_phil_path,
            path,
            self.rungroup.untrusted_pixel_mask_path,
        ).split()
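        # NB: str.split() tokenises on whitespace, so none of the values
        # interpolated above may contain spaces.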

        commands = Script(arguments).run()
        submission_ids = []
        if self.app.params.mp.method == 'local':
            self.status = "RUNNING"
        for command in commands:
            try:
                result = easy_run.fully_buffered(command=command)
                result.raise_if_errors()
            except Exception as e:
                # Tolerate the benign AFS-token warning; re-raise anything else.
                if ("Warning: job being submitted without an AFS token."
                        not in str(e)):
                    raise
            submission_ids.append(
                get_submission_id(result, self.app.params.mp.method))
        if self.app.params.mp.method == 'local':
            self.status = "DONE"
        else:
            return ",".join(submission_ids)
Example no. 54
def exercise_misc():
  utils.host_and_user().show(prefix="### ")
  time_in_seconds = 1.1
  for i_trial in range(55):
    time_in_seconds = time_in_seconds**1.1
    time_units, time_unit = utils.human_readable_time(
      time_in_seconds=time_in_seconds)
    assert approx_equal(
      utils.human_readable_time_as_seconds(time_units, time_unit),
      time_in_seconds)
  #
  fts = utils.format_timestamp
  f12 = utils.format_timestamp_12_hour
  f24 = utils.format_timestamp_24_hour
  def check(string, expected):
    assert len(string) == len(expected)
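  # (length-only comparison: the rendered local time presumably depends on
  # the machine's timezone, so the exact string cannot be asserted)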
  check(f12(1280007000), 'Jul 24 2010 02:30 PM')
  check(f24(1280007000), 'Jul 24 2010 14:30')
  check(f12(1280007000, True), '24-07-10 02:30 PM')
  check(f24(1280007000, True), '24-07-10 14:30')
  check(fts(1280007000), 'Jul 24 2010 02:30 PM')
  #
  nfs = utils.number_from_string
  for string in ["True", "False"]:
    try: nfs(string=string)
    except ValueError as e:
      assert str(e) == 'Error interpreting "%s" as a numeric expression.' % (
        string)
    else: raise Exception_expected
  assert nfs(string="-42") == -42
  assert approx_equal(nfs(string="3.14"), 3.14)
  assert approx_equal(nfs(string="cos(0)"), 1)
  try: nfs(string="xxx(0)")
  except ValueError as e:
    assert str(e).startswith(
      'Error interpreting "xxx(0)" as a numeric expression: ')
  else: raise Exception_expected
  #
  s = "[0.143139, -0.125121, None, -0.308607]"
  assert numstr(values=eval(s)) == s
  #
  for s,i in {"2000000" : 2000000,
              "2k" : 2048,
              "2Kb" : 2048,
              "2 Kb" : 2048,
              "5Mb" : 5*1024*1024,
              "2.5Gb" : 2.5*1024*1024*1024,
              "1T": 1024*1024*1024*1024,
              10000 : 10000,
              5.5 : 5.5,
              }.items():
    assert utils.get_memory_from_string(s) == i
  #
  assert utils.tupleize(1) == (1,)
  assert utils.tupleize("abcde") == ('a', 'b', 'c', 'd', 'e')
  assert utils.tupleize([1,2,3]) == (1,2,3)
  #
  sf = utils.search_for
  assert sf(pattern="fox", mode="==", lines=["fox", "foxes"]) \
      == ["fox"]
  assert sf(pattern="o", mode="find", lines=["fox", "bird", "mouse"]) \
      == ["fox", "mouse"]
  assert sf(pattern="fox", mode="startswith", lines=["fox", "foxes"]) \
      == ["fox", "foxes"]
  assert sf(pattern="xes", mode="endswith", lines=["fox", "foxes"]) \
      == ["foxes"]
  assert sf(pattern="es$", mode="re.search", lines=["geese", "foxes"]) \
      == ["foxes"]
  assert sf(pattern="ge", mode="re.match", lines=["geese", "angel"]) \
      == ["geese"]
  #
  nd1d = utils.n_dim_index_from_one_dim
  for size in range(1,5):
    for i1d in range(size):
      assert nd1d(i1d=i1d, sizes=(size,)) == [i1d]
  for sizes in [(1,1), (1,3), (3,1), (2,3)]:
    ni, nj = sizes
    for i in range(ni):
      for j in range(nj):
        i1d = i*nj+j
        assert nd1d(i1d=i1d, sizes=sizes) == [i,j]
  for sizes in [(1,1,1), (1,3,1), (3,2,1), (4,3,2)]:
    ni, nj, nk = sizes
    for i in range(ni):
      for j in range(nj):
        for k in range(nk):
          i1d = (i*nj+j)*nk+k
          assert nd1d(i1d=i1d, sizes=sizes) == [i,j,k]
  #
  from libtbx import easy_run
  b = easy_run.fully_buffered(
    command="libtbx.raise_exception_for_testing")
  for lines in [b.stdout_lines, b.stderr_lines]:
    assert lines[0].startswith("EXCEPTION_INFO: show_stack(0): ")
    assert lines[-1] == "EXCEPTION_INFO: RuntimeError: Just for testing."
  b = easy_run.fully_buffered(
    command="libtbx.raise_exception_for_testing silent")
  b.raise_if_errors_or_output()
  #
  frange = utils.frange
  samples = utils.samples
  assert approx_equal([i/10. for i in range(-2,2)], frange(-0.2,0.2,0.1))
  assert approx_equal([i/10. for i in range(-2,2+1)], samples(-0.2,0.2,0.1))
  assert approx_equal([i/10. for i in range(2,-2,-1)], frange(0.2,-0.2,-0.1))
  assert approx_equal([i/10. for i in range(2,-2-1,-1)], samples(0.2,-0.2,-0.1))
  assert approx_equal([i/4. for i in range(4,8)], frange(1, 2, 0.25))
  assert approx_equal([i/4. for i in range(4,8+1)], samples(1, 2, 0.25))
  assert approx_equal([0.2+i/3. for i in range(4)], frange(0.2, 1.3, 1./3))
  assert approx_equal([0.2+i/3. for i in range(4)], samples(0.2, 1.3, 1./3))
  assert approx_equal(list(range(5)) , frange(5))
  assert approx_equal(list(range(5+1)) , samples(5))
  assert approx_equal(list(range(-5)), frange(-5))
  assert approx_equal(list(range(-5-1)), samples(-5))
  assert approx_equal(list(range(1,3)), frange(1, 3))
  assert approx_equal(list(range(1,3+1)), samples(1, 3))
  assert approx_equal([i/10. for i in range(20,9,-2)], frange(2.0,0.9,-0.2))
  assert approx_equal([i/10. for i in range(20,9,-2)], samples(2.0,0.9,-0.2))
  #
  ff = utils.format_float_with_standard_uncertainty
  assert ff(21.234567, 0.0013) == "21.2346(13)"
  assert ff(21.234567, 0.0023) == "21.235(2)"
  assert ff(12345, 45) == "12350(50)"
  assert ff(12.3,1.2) == "12.3(12)"
  assert ff(-0.2451, 0.8135) == "-0.2(8)"
  assert ff(1.234, 0.196) == "1.2(2)"
  assert ff(1.234, 0.193) == "1.23(19)"
  #
  for n in range(4):
    assert len(utils.random_hex_code(number_of_digits=n)) == n
  #
  print("multiprocessing problem:", utils.detect_multiprocessing_problem())
  #
  print("base36_timestamp():", utils.base36_timestamp(), "now")
  print("base36_timestamp():", utils.base36_timestamp(
    seconds_since_epoch=115855*365.2425*24*60*60), "year 115855 CE")
  #
  print("get_svn_revision():", utils.get_svn_revision())
  print("get_build_tag():", utils.get_build_tag())
  # concatenate_python_script
  # XXX the string concatenation here is required to trick libtbx.find_clutter,
  # which will warn about repetition of the future division import.
  script = """
from __future__ """ + """import division
import os.path

def foo():
  print "bar"
"""
  d = tempfile.mkdtemp()
  name = os.path.join(d, "tst_libtbx_utils_python_script.py")
  name2 = os.path.join(d, "tst_libtbx_utils_python_script2.py")
  open(name, "w").write(script)
  f = open(name2, "w")
  utils.concatenate_python_script(out=f, file_name=name)
  f.close()
  lines = open(name2).readlines()
  have_def = False
  for line in lines:
    assert (not "__future__" in line)
    if line.startswith("def foo"):
      have_def = True
  assert have_def
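Read off the assertions above: frange behaves like a float-valued, end-exclusive range, and samples is its end-inclusive counterpart. A minimal pure-Python sketch of frange under that reading (the real utils.frange may differ in details, and samples presumably applies a small tolerance at the endpoint):

def frange_sketch(start, stop=None, step=None):
    # Mirror range()'s argument conventions, but allow floats.
    if stop is None:
        start, stop = 0, start
    if step is None:
        step = 1
    values = []
    i = 0
    x = start
    while (step > 0 and x < stop) or (step < 0 and x > stop):
        values.append(x)
        i += 1
        x = start + i * step  # recompute from start to limit float drift
    return values

assert frange_sketch(1, 3) == [1, 2]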
Example no. 55
def run_combinations(compiler_versions, all_utimes, n_scatt, n_refl,
                     compiler_build_opts_list, real_list):
    for lang, setup_sh_list, compiler, build_opts in compiler_build_opts_list:
        for setup_sh in setup_sh_list:
            if (setup_sh is None):
                setup_cmd = ""
            else:
                setup_cmd = ". %s/%s; " % (setup_dir, setup_sh)
            compiler_version = easy_run.fully_buffered(
                command=setup_cmd + compiler + " --version",
                join_stdout_stderr=True).stdout_lines[0]
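            # join_stdout_stderr=True: some compilers presumably emit
            # their --version banner on stderr rather than stdout.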
            if (lang in ["f", "c"]):
                ld_preload_flags = [False, True]
            else:
                ld_preload_flags = [False]
            for ld_preload_flag in ld_preload_flags:
                iml = ["", " Intel Math Lib"][int(ld_preload_flag)]
                compiler_versions.append(compiler_version + iml)
                build_cmd = " ".join([setup_cmd + compiler, build_opts])
                print(build_cmd)
                utimes = []
                if (n_scatt != 0):
                    for real in real_list:
                        print("  %s" % real)
                        for replace_cos in [False, True]:
                            print("    replace_cos", replace_cos)
                            for replace_exp in [False, True]:
                                print("      replace_exp", replace_exp)
                                sys.stdout.flush()
                                if (compiler_version != "n/a"):
                                    utime = write_build_run(
                                        setup_cmd=setup_cmd,
                                        ld_preload_flag=ld_preload_flag,
                                        n_scatt=n_scatt,
                                        n_refl=n_refl,
                                        real=real,
                                        lang=lang,
                                        build_cmd=build_cmd,
                                        replace_cos=replace_cos,
                                        replace_exp=replace_exp)
                                    if (utime is not None):
                                        print("        %4.2f" % utime)
                                    else:
                                        utime = -1.0
                                        print("        err")
                                else:
                                    utime = -1.0
                                    print("        n/a")
                                utimes.append(utime)
                                sys.stdout.flush()
                else:
                    if (lang.lower() == "f"):
                        f_source = libtbx.env.find_in_repositories(
                            relative_path="lapack_fem/dsyev_test.f",
                            test=op.isfile,
                            optional=False)
                        build_cmd_compl = build_cmd + " " + f_source
                    else:
                        cpp_source = libtbx.env.find_in_repositories(
                            relative_path="lapack_fem/dsyev_test.cpp",
                            test=op.isfile,
                            optional=False)
                        build_cmd_compl = build_cmd + finalize_cpp_build_cmd(
                            cpp_source)
                    utime = build_run(setup_cmd=setup_cmd,
                                      ld_preload_flag=ld_preload_flag,
                                      n_scatt=n_scatt,
                                      n_refl=n_refl,
                                      build_cmd=build_cmd_compl,
                                      check_max_a_b=False)
                    if (utime is None):
                        print("err")
                        utime = -1.0
                    else:
                        print("min utime: %.2f" % utime)
                    sys.stdout.flush()
                    utimes.append(utime)
                all_utimes.append((utimes, build_cmd + iml))
Example no. 56
def number_of_processors(return_value_if_unknown=None):
    global _number_of_processors
    if (_number_of_processors is Auto):
        _number_of_processors = None
        try:
            import multiprocessing
        except ImportError:
            pass
        else:
            try:
                n = multiprocessing.cpu_count()
            except NotImplementedError:
                pass
            else:
                _number_of_processors = n
        if (_number_of_processors is None):
            try:
                import boost_adaptbx.boost.python as bp
            except ImportError:
                pass
            else:
                n = bp.ext.number_of_processors()
                if (n != 0):
                    _number_of_processors = n
        if (_number_of_processors is None):
            cpuinfo = "/proc/cpuinfo"  # Linux
            if os.path.isfile(cpuinfo):
                n = 0
                for line in open(cpuinfo).read().splitlines():
                    if (not line.startswith("processor")): continue
                    line = line[9:].replace(" ", "").replace("\t", "")
                    if (not line.startswith(":")): continue
                    n += 1
                if (n != 0):
                    _number_of_processors = n
        if (_number_of_processors is None):
            cmd = "/usr/sbin/system_profiler"  # Mac OS X
            if os.path.isfile(cmd):
                keys = [
                    "Total Number Of Cores: ", "Number Of CPUs: ",
                    "Number Of Processors: "
                ]
                ns = [None] * len(keys)
                from libtbx import easy_run
                for line in easy_run.fully_buffered(
                        command=cmd + " SPHardwareDataType").stdout_lines:
                    line = line.strip()
                    for i, key in enumerate(keys):
                        if (line.startswith(key)):
                            try:
                                n = int(line[len(key):])
                            except ValueError:
                                continue
                            if (n > 0 and ns[i] is None):
                                ns[i] = n
                for n in ns:
                    if (n is not None):
                        _number_of_processors = n
                        break
        if (_number_of_processors is None):
            n = os.environ.get("NUMBER_OF_PROCESSORS")  # Windows
            if (n is not None):
                try:
                    n = int(n)
                except ValueError:
                    pass
                else:
                    _number_of_processors = n
        if (_number_of_processors is None):
            cmd = "/sbin/hinv"  # IRIX
            if os.path.isfile(cmd):
                from libtbx import easy_run
                for line in easy_run.fully_buffered(command=cmd).stdout_lines:
                    if (line.endswith(" Processors")):
                        try:
                            n = int(line.split(" ", 1)[0])
                        except ValueError:
                            continue
                        if (n > 0):
                            _number_of_processors = n
                            break
    if (_number_of_processors is not None):
        return _number_of_processors
    return return_value_if_unknown
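A typical call site, as a sketch (libtbx.introspection is assumed to be the home of this helper; the module-level _number_of_processors cache starts out as the Auto sentinel used above):

from libtbx.introspection import number_of_processors  # assumed location

n = number_of_processors(return_value_if_unknown=1)
print("running with %d worker process(es)" % n)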
Example no. 57
# (fragment: nd1d, utils, and approx_equal are defined earlier in the source file)
for sizes in [(1, 1), (1, 3), (3, 1), (2, 3)]:
    ni, nj = sizes
    for i in range(ni):
        for j in range(nj):
            i1d = i * nj + j
            assert nd1d(i1d=i1d, sizes=sizes) == [i, j]
for sizes in [(1, 1, 1), (1, 3, 1), (3, 2, 1), (4, 3, 2)]:
    ni, nj, nk = sizes
    for i in range(ni):
        for j in range(nj):
            for k in range(nk):
                i1d = (i * nj + j) * nk + k
                assert nd1d(i1d=i1d, sizes=sizes) == [i, j, k]
#
from libtbx import easy_run
b = easy_run.fully_buffered(command="libtbx.raise_exception_for_testing")
for lines in [b.stdout_lines, b.stderr_lines]:
    assert lines[0].startswith("EXCEPTION_INFO: show_stack(0): ")
    assert lines[-1] == "EXCEPTION_INFO: RuntimeError: Just for testing."
b = easy_run.fully_buffered(
    command="libtbx.raise_exception_for_testing silent")
b.raise_if_errors_or_output()
#
frange = utils.frange
samples = utils.samples
assert approx_equal([i / 10. for i in range(-2, 2)],
                    frange(-0.2, 0.2, 0.1))
assert approx_equal([i / 10. for i in range(-2, 2 + 1)],
                    samples(-0.2, 0.2, 0.1))
assert approx_equal([i / 10. for i in range(2, -2, -1)],
                    frange(0.2, -0.2, -0.1))
Example no. 58
def run_bond(prefix):
    edits = """
refinement {
  geometry_restraints.edits {
    bond {
      action = %s
      atom_selection_1 = %s
      atom_selection_2 = %s
      distance_ideal = 2.0
      sigma = 1.0
    }
  }
}
"""
    sels = {
        "old": ("chain A and resseq 7 and name C",
                "chain A and resseq 7 and name O"),
        "new": ("chain A and resseq 7 and name CA",
                "chain A and resseq 7 and name O"),
    }
    cntr = 0
    for action in ["add", "delete", "change"]:
        for kind in ["new", "old"]:
            prefix_ = "%s_bond_%s_%s" % (prefix, kind, action)
            with open("%s.eff" % prefix_, "w") as f:
                f.write(edits % ((action,) + sels[kind]))
            cmd = cmd_base % (prefix, prefix_)
            print(cmd)
            r = easy_run.fully_buffered(cmd)
            if action == "delete":
                cntr += 1
                assert r.stderr_lines[0] == \
                    'Sorry: geometry_restraints.edits.bond.action = delete not implemented.'
            else:
                if kind == "new":
                    if action == 'add':
                        cntr += 1
                        assert_lines_in_file(
                            file_name="%s.pdb.geo" % prefix,
                            lines="""bond pdb=" CA  ILE A   7 "
                             pdb=" O   ILE A   7 "
                          ideal  model  delta    sigma   weight residual
                          2.000  2.394 -0.394 1.00e+00 1.00e+00 1.56e-01""")
                    elif action == 'change':
                        cntr += 1
                        assert r.stderr_lines[
                            0] == "Sorry: Bond below does not exists, use action=add instead."
                elif (kind == "old"):
                    if action == 'add':
                        cntr += 1
                        assert r.stderr_lines[
                            0] == "Sorry: Bond below exists, use action=change instead."
                    elif action == 'change':
                        cntr += 1
                        assert_lines_in_file(
                            file_name="%s.pdb.geo" % prefix,
                            lines="""bond pdb=" C   ILE A   7 "
                                  pdb=" O   ILE A   7 "
                          ideal  model  delta    sigma   weight residual
                          2.000  1.229  0.771 1.00e+00 1.00e+00 5.94e-01""")
    assert cntr == 6
Example no. 59
def test2():
  """Test joint refinement where two detectors are constrained to enforce a
  differential distance (along the shared initial normal vector) of 1 mm.
  This test can be constructed on the fly from data already in
  dials_regression"""

  if not libtbx.env.has_module("dials_regression"):
    print("Skipping test2 in " + __file__ + " as dials_regression not present")
    return

  dials_regression = libtbx.env.find_in_repositories(
    relative_path="dials_regression",
    test=os.path.isdir)

  # use the 'centroid' data for this test. The 'regularized' experiments are
  # useful because the detector has fast and slow axes exactly aligned with
  # X and -Y, so the distance lies exactly along the normal vector and can be
  # altered directly by changing the Z component of the origin vector
  data_dir = os.path.join(dials_regression, "refinement_test_data", "centroid")
  experiments_path = os.path.join(data_dir, "experiments_XPARM_REGULARIZED.json")
  pickle_path = os.path.join(data_dir, "spot_1000_xds.pickle")

  # work in a temporary directory
  cwd = os.path.abspath(os.curdir)
  tmp_dir = open_tmp_directory(suffix="test_dials_constraints")
  os.chdir(tmp_dir)

  # load the experiments and spots
  el = ExperimentListFactory.from_json_file(experiments_path, check_format=False)
  rt = flex.reflection_table.from_pickle(pickle_path)

  # adjust the detector distance by -0.5 mm
  detector = el[0].detector
  panel = detector[0]
  fast = panel.get_fast_axis()
  slow = panel.get_slow_axis()
  origin = panel.get_origin()
  panel.set_frame(fast, slow, origin[0:2] + (origin[2] + 0.5,))
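  # (the origin Z component is presumably negative here, so adding +0.5 to it
  # shortens the detector distance by 0.5 mm, as the comment above says)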

  # duplicate the experiment and adjust distance by +1 mm
  e2 = deepcopy(el[0])
  detector = e2.detector
  panel = detector[0]
  fast = panel.get_fast_axis()
  slow = panel.get_slow_axis()
  origin = panel.get_origin()
  panel.set_frame(fast, slow, origin[0:2] + (origin[2] - 1.0,))

  # append to the experiment list and write out
  el.append(e2)
  dump = ExperimentListDumper(el)
  dump.as_json('foo_experiments.json')

  # duplicate the reflections and increment the experiment id
  rt2 = deepcopy(rt)
  rt2['id'] = rt2['id'] + 1

  # concatenate reflections and write out
  rt.extend(rt2)
  rt.as_pickle('foo_reflections.pickle')

  # set up refinement, constraining the distance parameter
  cmd = ("dials.refine foo_experiments.json foo_reflections.pickle "
         "history=history.pickle refinement.parameterisation.detector."
         "constraints.parameter=Dist")
  try:
    result = easy_run.fully_buffered(command=cmd).raise_if_errors()
    # load refinement history
    import pickle
    with open('history.pickle', 'rb') as f:
      history = pickle.load(f)
    ref_exp = ExperimentListFactory.from_json_file('refined_experiments.json',
      check_format=False)
  finally:
    os.chdir(cwd)

  # we expect 8 steps of constrained refinement
  assert history.get_nrows() == 8

  # get parameter vector from the final step
  pvec = history['parameter_vector'][-1]

  # the constrained parameters have indices 0 and 6 in this case. Check they
  # are still exactly 1 mm apart
  assert pvec[0] == pvec[6] - 1.0

  # NB because the other detector parameters were not also constrained, the
  # refined lab frame distances may not in fact differ by 1 mm. The constraint
  # acts along the initial detector normal vector during composition of a new
  # detector position. After refinement of tilt/twist type rotations,
  # the final distances along the new normal vectors will change
  det1, det2 = ref_exp.detectors()
  p1 = det1[0]
  p2 = det2[0]
  assert approx_equal(p2.get_distance() - p1.get_distance(), 0.9987655)

  print "OK"
Example no. 60
def run():
    cmd = os.path.join(libtbx.env.under_build('bin'), 'libtbx.import_all_ext')
    easy_run.fully_buffered(cmd).raise_if_errors()
    print "OK"