def exercise_cablam(): regression_pdb = libtbx.env.find_in_repositories( relative_path="phenix_regression/pdb/pdb103l.ent", test=os.path.isfile) #This is the same file used for tst_kinemage.py if (regression_pdb is None): print( "Skipping exercise_cablam(): input pdb (pdb103l.ent) not available" ) return #----- pdb_io = pdb.input(regression_pdb) pdbid = os.path.basename(regression_pdb) hierarchy = pdb_io.construct_hierarchy() oneline_test = cablam_test_string() cablam_validate.oneline(hierarchy, peptide_cutoff=0.05, peptide_bad_cutoff=0.01, ca_cutoff=0.005, pdbid=pdbid, writeto=oneline_test) text_test = cablam_test_string() outliers = cablam_validate.analyze_pdb(hierarchy, outlier_cutoff=0.05, pdbid=pdbid) cablam_validate.give_text(outliers, writeto=text_test) #print '|',oneline_test.output,'|' #print '|',ref_cablam_give_oneline,'|' #print '|',text_test.output,'|' #print '|',ref_cablam_give_text,'|' assert not show_diff(oneline_test.output, ref_cablam_give_oneline) assert not show_diff(text_test.output, ref_cablam_give_text)
def exercise_correct_rhombohedral_setting_if_necessary(): for symbol in sgtbx.rhombohedral_hermann_mauguin_symbols: for p, z in [("20 20 21 90 90 120", "R"), ("31 31 31 85 85 85", "H")]: uc = uctbx.unit_cell(p) cs = crystal.symmetry( unit_cell=uc, space_group_symbol=symbol + ":" + z, correct_rhombohedral_setting_if_necessary=True) assert cs.unit_cell().is_similar_to(uc) other_z = {"R": "H", "H": "R"}[z] assert not show_diff(cs.space_group_info().type().lookup_symbol(), symbol + " :" + other_z) cs = crystal.symmetry(unit_cell="20 20 21 90 89 120", space_group_symbol="R3:R", correct_rhombohedral_setting_if_necessary=True, assert_is_compatible_unit_cell=False) sio = StringIO() cs.show_summary(f=sio) assert not show_diff( sio.getvalue(), """\ Unit cell: (20.3388, 20.3388, 20.3388, 98.9315, 98.9315, 98.9315) Space group: R 3 :R (No. 146) """) cs = crystal.symmetry(unit_cell="31 31 31 85 85 86", space_group_symbol="R3:H", correct_rhombohedral_setting_if_necessary=True, assert_is_compatible_unit_cell=False) sio = StringIO() cs.show_summary(f=sio) assert not show_diff( sio.getvalue(), """\ Unit cell: (36.4146, 36.4146, 31, 90, 90, 120) Space group: R 3 :H (No. 146) """)
def exercise_converter(): textures = [] gl.glGenTextures(3, textures) assert textures == [0, 0, 0] for i in range(10000): textures = [] gl.glGenTextures(3, textures) assert textures == [0, 0, 0] for i in range(10000): textures = [9, 3, 5] gl.glGenTextures(3, textures) assert textures == [9, 3, 5] try: textures = [9, 3, 5] gl.glGenTextures(4, textures) except RuntimeError as e: assert not show_diff( str(e), """\ Argument "textures" has the wrong number of elements: expected size: 4 given size: 3""") else: raise Exception_expected try: textures = [9, "foo", 5] gl.glGenTextures(3, textures) except RuntimeError as e: assert not show_diff( str(e), """\ Argument "textures" has one or more elements of the wrong type.""") else: raise Exception_expected
def exercise(): verbose = "--verbose" in sys.argv[1:] list_cif = server.mon_lib_list_cif() srv = server.server(list_cif=list_cif) open("tmp.cif", "w").write(tmp_cif) srv.process_cif(file_name="tmp.cif") comp_comp_id = srv.get_comp_comp_id_direct(comp_id="tst") motif = comp_comp_id.as_geometry_restraints_motif() out = StringIO() motif.show(out=out) if (verbose): sys.stdout.write(out.getvalue()) assert not show_diff(out.getvalue(), expected_out_tst_comp) for link_link_id in srv.link_link_id_list: out = StringIO() link_link_id.as_geometry_restraints_motif_manipulation().show(out=out) if (verbose): sys.stdout.write(out.getvalue()) if (link_link_id.chem_link.id == "tst_lnk"): assert not show_diff(out.getvalue(), expected_out_tst_lnk) for mod_mod_id in srv.mod_mod_id_list: out = StringIO() mod_mod_id.as_geometry_restraints_motif_manipulation().show(out=out) if (verbose): sys.stdout.write(out.getvalue()) if (mod_mod_id.chem_mod.id == "tst_mod"): assert not show_diff(out.getvalue(), expected_out_tst_mod) print "OK"
def run_and_compare_implementations(this_script, n, m, iprint): outputs = [] for impl in ["fortran", "raw_reference", "raw"]: if impl == "fortran" and not have_lbfgs_fem: continue cmd = 'scitbx.python "%s" %s %d %d %d %d' % (this_script, impl, n, m, iprint[0], iprint[1]) out = run_cmd(cmd=cmd) if impl == "fortran": out = out.replace("D-", "E-").replace("D+", "E+") out = replace_e0dd_with_edd(out=out) out = out.replace("E-00", "E+00") out = truncate_floats(out=out) outputs.append(out) assert len(outputs) >= 2 a = outputs[0] for b in outputs[1:]: if sys.platform != 'darwin': assert a == b, show_diff(b, a) elif a != b: show_diff(b, a) # We need this to cover up test failure with Xcode >=7.3 for lia, lib in zip(a.splitlines(), b.splitlines()): if lia != lib: gnorma = lia.split()[3] gnormb = lib.split()[3] assert abs(float(gnorma) - float(gnormb)) <= 1e-6
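# Added sketch (not part of the original suite): every comparison in this file
# follows the same idiom. libtbx.test_utils.show_diff(a, b) prints a diff of
# the two strings and returns a true value when they differ, so
# "assert not show_diff(got, expected)" both reports and enforces equality.
# A minimal, self-contained illustration of that idiom:

def demo_show_diff_idiom():
  from libtbx.test_utils import show_diff
  got = "alpha\nbeta\n"
  expected = "alpha\nbeta\n"
  # Passes silently because the strings match; a mismatch would print the
  # diff and make the assertion fail.
  assert not show_diff(got, expected)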
def exercise_str_repr(): sgi = sgtbx.space_group_info('P1') uc = sgi.any_compatible_unit_cell(volume=1000) cs = crystal.symmetry(unit_cell=None, space_group=None) assert eval(repr(cs)).is_similar_symmetry(cs) assert not show_diff(str(cs), """\ Unit cell: None Space group: None""") cs = crystal.symmetry(unit_cell=uc, space_group=None) assert eval(repr(cs)).is_similar_symmetry(cs, 1e-8, 1e-3) assert not show_diff( str(cs), """\ Unit cell: (8.52593, 11.0837, 14.4941, 83, 109, 129) Space group: None""") cs = crystal.symmetry(unit_cell=None, space_group=sgi.group()) assert eval(repr(cs)).is_similar_symmetry(cs, 1e-8, 1e-3) assert not show_diff(str(cs), """\ Unit cell: None Space group: P 1 (No. 1)""") cs = crystal.symmetry(unit_cell=uc, space_group=sgi.group()) assert eval(repr(cs)).is_similar_symmetry(cs, 1e-8, 1e-3) assert not show_diff( str(cs), """\ Unit cell: (8.52593, 11.0837, 14.4941, 83, 109, 129) Space group: P 1 (No. 1)""")
def exercise_cablam(): regression_pdb = libtbx.env.find_in_repositories( relative_path="phenix_regression/pdb/pdb103l.ent", test=os.path.isfile) #This is the same file used for tst_kinemage.py if (regression_pdb is None): print "Skipping exercise_cablam(): input pdb (pdb103l.ent) not available" return #----- pdb_io = pdb.input(regression_pdb) pdbid = os.path.basename(regression_pdb) hierarchy = pdb_io.construct_hierarchy() output_holder = cablam_test_string() cablamalyze = cablam.cablamalyze(pdb_hierarchy=hierarchy, outliers_only=False, out=output_holder, quiet=False) cablamalyze.as_oneline() assert not show_diff(output_holder.output, ref_cablam_oneline) output_holder.output = "" cablamalyze.as_text() assert not show_diff(output_holder.output, ref_cablam_text)
def exercise_adopt_phil(): master_phil = libtbx.phil.parse("""\ scope1 { a = 1 .type = int b = 2 .type = int } """) working_phil = libtbx.phil.parse("""\ scope1.a = 3 scope1.b = 4 """) i = libtbx.phil.interface.index(master_phil=master_phil, working_phil=working_phil, fetch_new=True) params = i.get_python_object() other_master_phil = libtbx.phil.parse("""\ scope1 { c = 3 .type = int } scope2 { subscope2 { d = 4 .type = int } e = 5 .type = int } """) i.adopt_phil(phil_object=other_master_phil) scope1 = i.get_scope_by_name("scope1") s = StringIO() scope1.show(out=s) assert not show_diff(s.getvalue(), """\ scope1 { a = 3 b = 4 c = 3 } """) s = StringIO() i.working_phil.show(out=s) assert not show_diff( s.getvalue(), """\ scope1 { a = 3 b = 4 c = 3 } scope2 { subscope2 { d = 4 } e = 5 } """)
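# Context sketch (added, hedged): the libtbx.phil.interface.index used above
# layers on the plain phil workflow of parse -> fetch -> extract. A minimal
# round trip using only libtbx.phil, for reference:

def demo_phil_round_trip():
  import libtbx.phil
  master = libtbx.phil.parse("""\
scope1 {
  a = 1
    .type = int
}
""")
  user = libtbx.phil.parse("scope1.a = 3")
  working = master.fetch(source=user)
  params = working.extract()
  # The user-supplied value overrides the master default.
  assert params.scope1.a == 3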
def exercise(args): verbose = "--verbose" in args if (verbose): log = sys.stdout else: log = None mon_lib_srv = monomer_library.server.server() ener_lib = monomer_library.server.ener_lib() ncs_dir = libtbx.env.find_in_repositories( relative_path="phenix_regression/ncs", test=os.path.isdir) if (ncs_dir is None): print "Skipping exercise(): input files not available" else: for file_name in ["simple.pdb", "ambiguous_alignment.pdb"]: model = get_model(os.path.join(ncs_dir, file_name)) group = ncs.restraints.group.from_atom_selections( model = model, reference_selection_string=None, selection_strings=["chain A", "chain B"], coordinate_sigma=None, b_factor_weight=None, special_position_warnings_only=False) assert len(group.selection_pairs) == 1 assert list(group.selection_pairs[0][0]) == [0,1,2,3] if (file_name == "simple.pdb"): assert list(group.selection_pairs[0][1]) == [4,5,6,7] else: assert list(group.selection_pairs[0][1]) == [4,6,7,8] model = get_model(os.path.join(ncs_dir, "no_match.pdb")) try: ncs.restraints.group.from_atom_selections( model=model, reference_selection_string=None, selection_strings=["chain A", "chain B"], coordinate_sigma=None, b_factor_weight=None, special_position_warnings_only=False) except Sorry, e: assert not show_diff(str(e), '''\ NCS restraints selections do not produce any pairs of matching atoms: Reference selection: "chain A" Other selection: "chain B"''') else: raise Exception_expected try: ncs.restraints.group.from_atom_selections( model=model, reference_selection_string=None, selection_strings=["chain A", "chain C"], coordinate_sigma=None, b_factor_weight=None, special_position_warnings_only=False) except Sorry, e: assert not show_diff(str(e), '''\ NCS restraints selections produce only one pair of matching atoms: Reference selection: "chain A" Other selection: "chain C"''')
def exercise_miller_export_as_shelx_hklf(): s = """\ 1 2 -1 23.34 4.56 2 -3 9 12.45 6.12 99999999999999999.9999999.99 -999-999-999-9999.99-9999.99 3 4 5999999.99999999. 3 4 5-99999.9-999999. """ ma = hklf.reader(file_object=StringIO(s)).as_miller_arrays()[0] sio = StringIO() ma.export_as_shelx_hklf(file_object=sio) ma2 = hklf.reader(file_object=StringIO(sio.getvalue())).as_miller_arrays()[0] assert approx_equal(ma.indices(), ma2.indices()) assert approx_equal(ma.data(), ma2.data()) assert approx_equal(ma.sigmas(), ma2.sigmas()) # ma = ma.select(flex.size_t([0])) def check(d, s, f): if (s is not None): s = flex.double([s]) ma2 = ma.array(data=flex.double([d]), sigmas=s) sio = StringIO() ma2.export_as_shelx_hklf(sio, normalise_if_format_overflow=True) assert not show_diff(sio.getvalue(), """\ 1 2 -1%s 0 0 0 0.00 0.00 """ % f) try: ma2.export_as_shelx_hklf(sio) except RuntimeError: pass else: raise Exception_expected check(-12345678, 1, "-999999. 0.08") check(-12345678, None, "-999999. 0.00") check(2, -12345678, " 0.16-999999.") check(123456789, 30, "9999999. 2.43") check(123456789, None, "9999999. 0.00") check(40, 123456789, " 3.249999999.") check(-23456789, 123456789, "-999999.5263153.") check(123456789, -23456789, "5263153.-999999.") # ma = hklf.reader(file_object=StringIO(s)).as_miller_arrays()[0] ma = ma.select(flex.size_t([0,1])) ma2 = ma.array(data=flex.double([123456789, -23456789])) sio = StringIO() ma2.export_as_shelx_hklf(sio, normalise_if_format_overflow=True) assert not show_diff(sio.getvalue(), """\ 1 2 -15263153. 0.00 2 -3 9-999999. 0.00 0 0 0 0.00 0.00 """) ma2 = ma.array(data=flex.double([-23456789, 823456789])) sio = StringIO() ma2.export_as_shelx_hklf(sio, normalise_if_format_overflow=True) assert not show_diff(sio.getvalue(), """\ 1 2 -1-284858. 0.00 2 -3 99999999. 0.00 0 0 0 0.00 0.00 """)
def exercise_basic(): a = flex.double((10, 20)) p = flex.double((-25, 355)) c = flex.polar(a, p, True) f = flex.double((0.3, 0.9)) s = miller.set(crystal_symmetry=crystal.symmetry(unit_cell=(10, 10, 10, 90, 90, 90), space_group_symbol="P1"), indices=flex.miller_index([(1, 2, 3), (-3, 4, -6)]), anomalous_flag=False) out = StringIO() s.array(data=c).as_phases_phs(out=out) assert not show_diff( out.getvalue(), """\ 1 2 3 4999.99 1.00 -25.00 -3 4 -6 9999.99 1.00 -5.00 """) out = StringIO() s.array(data=c).as_phases_phs(out=out, scale_amplitudes=False) assert not show_diff( out.getvalue(), """\ 1 2 3 10.00 1.00 -25.00 -3 4 -6 20.00 1.00 -5.00 """) for phases in [s.array(data=p), p]: out = StringIO() s.array(data=c).amplitudes().as_phases_phs(out=out, phases=phases, phases_deg=True) assert not show_diff( out.getvalue(), """\ 1 2 3 4999.99 1.00 -25.00 -3 4 -6 9999.99 1.00 355.00 """) for phases in [s.array(data=p * (math.pi / 180)), p * (math.pi / 180)]: out = StringIO() s.array(data=c).amplitudes().as_phases_phs(out=out, phases=phases, phases_deg=False) assert not show_diff( out.getvalue(), """\ 1 2 3 4999.99 1.00 -25.00 -3 4 -6 9999.99 1.00 355.00 """) for figures_of_merit in [s.array(data=f), f]: out = StringIO() s.array(data=c).as_phases_phs(out=out, figures_of_merit=figures_of_merit) assert not show_diff( out.getvalue(), """\ 1 2 3 4999.99 0.30 -25.00 -3 4 -6 9999.99 0.90 -5.00 """)
def run(): import os import shutil import fileinput import libtbx.load_env from libtbx import easy_run from libtbx.test_utils import show_diff try: dials_regression = libtbx.env.dist_path('dials_regression') except KeyError: print 'FAIL: dials_regression not configured' exit(0) path = os.path.join(dials_regression, "experiment_test_data/experiment_1.json") newpath = os.path.join(os.getcwd(), 'experiments.json') shutil.copyfile(path, newpath) for line in fileinput.FileInput(newpath, inplace=True): if '$DIALS_REGRESSION' in line: line = line.replace('$DIALS_REGRESSION', dials_regression) print line cmd = "dials.export format=mosflm %s" % newpath result = easy_run.fully_buffered(cmd).raise_if_errors() assert os.path.exists("mosflm/index.mat") with open("mosflm/index.mat", "rb") as f: lines = f.read() assert not show_diff( lines, """\ -0.01210200 -0.01954526 0.00309519 -0.00416605 -0.00080573 -0.02427340 0.01931593 -0.01241956 -0.00329641 0.000 0.000 0.000 -0.52228050 -0.84350975 0.12535704 -0.17980379 -0.03477015 -0.98308781 0.83360283 -0.53598726 -0.13350648 42.2717 42.2720 39.6704 90.0001 89.9993 89.9998 0.000 0.000 0.000 """) assert os.path.exists("mosflm/mosflm.in") with open("mosflm/mosflm.in", "rb") as f: lines = f.read() assert lines.startswith("DIRECTORY") assert not show_diff( "\n".join(lines.split("\n")[1:]), """\ TEMPLATE centroid_####.cbf SYMMETRY 89 BEAM 220.002 212.478 DISTANCE 190.1800 MATRIX index.mat """)
def exercise_pair_sym_table_tidy_and_full_connectivity(): def check_one_way(pst): for sym_pair in pst.iterator(): i_seq, j_seq = sym_pair.i_seqs() assert i_seq <= j_seq assert len(pst[i_seq][j_seq]) > 0 if (i_seq != j_seq): assert i_seq not in pst[j_seq] def check_two_way(pst): for sym_pair in pst.iterator(): i_seq, j_seq = sym_pair.i_seqs() assert len(pst[i_seq][j_seq]) > 0 assert len(pst[j_seq][i_seq]) > 0 pst_extracted = bond_sym_table.tidy( site_symmetry_table=structure.site_symmetry_table()) check_one_way(pst_extracted) sio_extracted = StringIO() structure.pair_sym_table_show(pst_extracted, out=sio_extracted) pst = pst_extracted.tidy( site_symmetry_table=structure.site_symmetry_table()) check_one_way(pst) sio = StringIO() structure.pair_sym_table_show(pst, out=sio) assert not show_diff(sio.getvalue(), sio_extracted.getvalue()) pst = pst_extracted.full_connectivity() check_two_way(pst) pst_full = pst_extracted.full_connectivity( site_symmetry_table=structure.site_symmetry_table()) check_two_way(pst_full) sio = StringIO() structure.pair_sym_table_show(pst_full, is_full_connectivity=True, out=sio) assert sio.getvalue().find("sym. equiv.") < 0 pst = pst_full.tidy( site_symmetry_table=structure.site_symmetry_table()) check_one_way(pst) sio = StringIO() structure.pair_sym_table_show(pst, out=sio) assert not show_diff(sio.getvalue(), sio_extracted.getvalue()) pst_full2 = pst_full.full_connectivity( site_symmetry_table=structure.site_symmetry_table()) check_two_way(pst_full2) pst = pst_full2.tidy( site_symmetry_table=structure.site_symmetry_table()) check_one_way(pst) sio = StringIO() structure.pair_sym_table_show(pst, out=sio) assert not show_diff(sio.getvalue(), sio_extracted.getvalue())
def tst_to_cif_sheet(): pdb_str1 = """\ SHEET 1 AA1 4 ILE A 7 PRO A 10 0 SHEET 2 AA1 4 MET A 92 THR A 99 1 O LEU A 95 N ILE A 7 SHEET 3 AA1 4 THR A 73 SER A 82 -1 N LEU A 76 O ILE A 94 SHEET 4 AA1 4 VAL A 34 THR A 39 -1 N PHE A 35 O VAL A 81 SHEET 1 AA2 3 LYS A 19 GLN A 23 0 SHEET 2 AA2 3 TRP A 59 VAL A 62 -1 O LEU A 60 N VAL A 22 SHEET 3 AA2 3 PHE A 51 ILE A 53 -1 N ILE A 52 O LYS A 61""" ann = annotation.from_records(pdb_str1.split("\n")) cif_loops = ann.as_cif_loops() assert len(cif_loops) == 4, len(cif_loops) out = StringIO() cif_block = iotbx.cif.model.block() for loop in cif_loops: loop.show(out) cif_block.add_loop(loop) v = out.getvalue() # print "\"%s\"" % v ann_back = annotation.from_cif_block(cif_block) assert ann_back.get_n_helices() == 0 assert ann_back.get_n_sheets() == 2 assert [len(x.strands) for x in ann_back.sheets] == [4, 3] # print ann_back.as_pdb_str() assert not show_diff(ann_back.as_pdb_str(), pdb_str1) pdb_str2 = """\ SHEET 1 AA1 4 ILE A 7 PRO A 10 0 SHEET 2 AA1 4 MET A 92 THR A 99 1 O LEU A 95 N ILE A 7 SHEET 3 AA1 4 THR A 73 SER A 82 -1 N LEU A 76 O ILE A 94 SHEET 4 AA1 4 VAL A 34 THR A 39 -1 N PHE A 35 O VAL A 81""" ann = annotation.from_records(pdb_str2.split("\n")) cif_loops = ann.as_cif_loops() assert len(cif_loops) == 4, len(cif_loops) out = StringIO() cif_block = iotbx.cif.model.block() for loop in cif_loops: loop.show(out) cif_block.add_loop(loop) v = out.getvalue() # print "\"%s\"" % v ann_back = annotation.from_cif_block(cif_block) assert ann_back.get_n_helices() == 0 assert ann_back.get_n_sheets() == 1 assert [len(x.strands) for x in ann_back.sheets] == [4] # print ann_back.as_pdb_str() assert not show_diff(ann_back.as_pdb_str(), pdb_str2)
def exercise_03(): answer_phil_str = """\ protein.sheet { sheet_id = " 3" first_strand = chain 'A' and resid 157 through 161 strand { selection = chain 'A' and resid 114 through 122 sense = antiparallel bond_start_current = None bond_start_previous = None } strand { selection = chain 'A' and resid 193 through 201 sense = parallel bond_start_current = None bond_start_previous = None } }""" annot = ss.annotation.from_records(records=ann_3.split('\n')) assert len(annot.helices) == 0 assert len(annot.sheets) == 1 ann_sheet = annot.sheets[0] assert ann_sheet.n_strands == 3 assert ann_sheet.registrations == [None]*3 only_sheet = annot.sheets[0] phil_string = annot.as_restraint_groups() assert not test_utils.show_diff(phil_string, answer_phil_str, strip_trailing_whitespace=True)
def run(args): assert len(args) == 0 import libtbx.load_env import os op = os.path cbf = libtbx.env.under_dist( module_name="cbflib", path="examples/fit2d_data.cbf") assert op.isfile(cbf) from cbflib_adaptbx.command_line import dump from cStringIO import StringIO sio = StringIO() dump.process(file_name=cbf, out=sio) from libtbx.test_utils import show_diff assert not show_diff(sio.getvalue(), """\ File name: %s Number of blocks: 1 Block name: image_1 Number of categories: 12 Category name: diffrn Category name: diffrn_source Category name: diffrn_radiation Category name: diffrn_radiation_wavelength Category name: diffrn_measurement Category name: diffrn_detector Category name: diffrn_detector_element Category name: diffrn_data_frame Category name: array_structure_list Category name: array_element_size Category name: array_intensities Category name: array_data """ % cbf) print "OK"
def run_and_compare_implementations(this_script, n, m, iprint): outputs = [] for impl in ["fortran", "raw_reference", "raw"]: if (impl == "fortran" and not have_lbfgs_fem): continue cmd = 'scitbx.python "%s" %s %d %d %d %d' % ( this_script, impl, n, m, iprint[0], iprint[1]) out = run_cmd(cmd=cmd) if (impl == "fortran"): out = out.replace("D-", "E-").replace("D+", "E+") out = replace_e0dd_with_edd(out=out) out = out.replace("E-00", "E+00") out = truncate_floats(out=out) outputs.append(out) assert len(outputs) >= 2 a = outputs[0] for i in xrange(1, len(outputs)): b = outputs[i] if show_diff(a, b): # We need this to cover up test failure with Xcode 7.3 for lia, lib in zip(a.splitlines(), b.splitlines()): if lia != lib: gnorma = lia.split()[3] gnormb = lib.split()[3] assert abs(float(gnorma) - float(gnormb)) <= 1e-7
def exercise_pro_missing_hd1(mon_lib_srv): pdb_inp = iotbx.pdb.input(source_info=None, lines="""\ ATOM 110 N PRO A 263 0.453 -20.680 -39.256 1.00 53.34 N ATOM 111 CA PRO A 263 0.444 -22.054 -39.751 1.00 50.42 C ATOM 112 C PRO A 263 0.860 -22.998 -38.645 1.00 52.10 C ATOM 113 O PRO A 263 1.693 -22.614 -37.817 1.00 48.32 O ATOM 114 CB PRO A 263 1.491 -22.052 -40.887 1.00 53.30 C ATOM 115 CG PRO A 263 2.012 -20.645 -40.990 1.00 57.05 C ATOM 116 CD PRO A 263 1.586 -19.897 -39.782 1.00 53.45 C ATOM 117 HA PRO A 263 -0.437 -22.302 -40.100 1.00 60.51 H ATOM 118 HB2 PRO A 263 2.210 -22.664 -40.664 1.00 63.96 H ATOM 119 HB3 PRO A 263 1.066 -22.318 -41.718 1.00 63.96 H ATOM 120 HG2 PRO A 263 2.980 -20.669 -41.043 1.00 68.45 H ATOM 121 HG3 PRO A 263 1.645 -20.229 -41.786 1.00 68.45 H ATOM 122 HD2 PRO A 263 1.267 -19.021 -40.049 1.00 64.14 H """) pdb_hierarchy = pdb_inp.construct_hierarchy() residue = pdb_hierarchy.only_residue() rotamer_iterator = mon_lib_srv.rotamer_iterator( comp_id=residue.resname, atom_names=residue.atoms().extract_name(), sites_cart=residue.atoms().extract_xyz()) assert not show_diff( rotamer_iterator.problem_message, 'resname=PRO: missing atom "HD1" for tor_id "hh3"')
def exercise_group_args(): from libtbx import group_args from cStringIO import StringIO out = StringIO() a = group_args(a=1, b=2, c=3) assert a.a == 1 assert a.b == 2 assert a.c == 3 b = group_args(d='d', e='e') assert b.d == 'd' assert b.e == 'e' print >> out, a v = out.getvalue() assert not show_diff( v, """group_args a : 1 b : 2 c : 3\n""") a.merge(b) assert a.a == 1 assert a.b == 2 assert a.c == 3 assert a.d == 'd' assert a.e == 'e' assert b.d == 'd' assert b.e == 'e' c = group_args(a=11, b=12) a.merge(c) assert a.a == 11 assert a.b == 12 assert a.c == 3 assert c.a == 11 assert c.b == 12
def exercise_heavy(): from mmtbx.regression import make_fake_anomalous_data from mmtbx.command_line import validate_waters import mmtbx.ions.utils from iotbx.file_reader import any_file file_base = "tst_validate_waters_1" pdb_file = make_fake_anomalous_data.write_pdb_input_cd_cl( file_base=file_base) mtz_file = make_fake_anomalous_data.generate_mtz_file( file_base="tst_validate_waters_1", d_min=1.5, anomalous_scatterers=[ group_args(selection="element CD", fp=-0.29, fdp=2.676), group_args(selection="element CL", fp=0.256, fdp=0.5), ]) pdb_in = any_file(pdb_file) hierarchy = pdb_in.file_object.hierarchy hierarchy, n = mmtbx.ions.utils.anonymize_ions(hierarchy, log=null_out()) hierarchy.write_pdb_file( "%s_start.pdb" % file_base, crystal_symmetry=pdb_in.file_object.crystal_symmetry()) args = [ "tst_validate_waters_1_start.pdb", "tst_validate_waters_1.mtz", "skip_twin_detection=True" ] results = validate_waters.run(args=args, out=null_out()) out = StringIO() results.show(out=out) s = easy_pickle.dumps(results) r2 = easy_pickle.loads(s) out2 = StringIO() r2.show(out=out2) assert not show_diff(out.getvalue(), out2.getvalue()) assert (results.n_bad >= 1) and (results.n_heavy == 2)
def run(): import os import libtbx.load_env from libtbx import easy_run from libtbx.test_utils import show_diff try: dials_regression = libtbx.env.dist_path('dials_regression') except KeyError: print 'FAIL: dials_regression not configured' exit(0) path = os.path.join(dials_regression, "refinement_test_data", "i04_weak_data") cmd = "dials.compare_orientation_matrices %s/experiments.json %s/regression_experiments.json" % ( path, path) result = easy_run.fully_buffered(cmd).raise_if_errors() out = "\n".join(result.stdout_lines[7:]) out = out.replace("-0", "0") assert not show_diff( out, """\ Change of basis op: a,b,c Rotation matrix to transform crystal 1 to crystal 2: {{1.000, 0.000, 0.000}, {0.000, 1.000, 0.000}, {0.000, 0.000, 1.000}} Rotation of 0.002 degrees about axis (0.916, 0.081, 0.393) """)
def check_tls_params(params1, params2):
  # Compare two lists of TLS groups element-wise: the T, L and S tensors and
  # the origin must agree numerically, the selection strings must match exactly.
  for tls1, tls2 in zip(params1, params2):
    assert approx_equal(tls1.t, tls2.t)
    assert approx_equal(tls1.l, tls2.l)
    assert approx_equal(tls1.s, tls2.s)
    assert approx_equal(tls1.origin, tls2.origin)
    assert not show_diff(tls1.selection_string, tls2.selection_string)
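# Hypothetical usage sketch (added): any objects exposing .t, .l, .s, .origin
# and .selection_string attributes work with check_tls_params; libtbx.group_args
# is a convenient stand-in here. Identical inputs pass all assertions.

def demo_check_tls_params():
  from libtbx import group_args
  tls = [group_args(
    t=(0.1, 0.2, 0.3, 0.0, 0.0, 0.0),
    l=(1.0, 1.0, 1.0, 0.0, 0.0, 0.0),
    s=(0.0,) * 9,
    origin=(10.0, 20.0, 30.0),
    selection_string="chain A")]
  check_tls_params(tls, tls)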
def exercise_1(prefix="tst_model_remove_alternative_conformations_1"): """ Make sure that CA in SER3 gets to the right position. """ inp = iotbx.pdb.input(lines=tst_pdb_str, source_info=None) model = mmtbx.model.manager(model_input=inp) model.remove_alternative_conformations(always_keep_one_conformer=False) assert not show_diff( model.model_as_pdb(), """\ ATOM 1 N PHE L 2 201.672 235.270 272.436 1.00 33.60 N ATOM 2 CA PHE L 2 201.753 236.596 271.840 1.00 33.60 C ATOM 3 C PHE L 2 200.597 237.520 272.192 1.00 33.60 C ATOM 4 O PHE L 2 200.835 238.721 272.367 1.00 33.60 O ATOM 5 CB PHE L 2 201.883 236.467 270.321 1.00 32.90 C ATOM 6 CG PHE L 2 201.850 237.775 269.616 1.00 32.90 C ATOM 7 CD1 PHE L 2 202.865 238.689 269.797 1.00 32.90 C ATOM 8 CD2 PHE L 2 200.820 238.083 268.748 1.00 32.90 C ATOM 9 CE1 PHE L 2 202.829 239.908 269.166 1.00 32.90 C ATOM 10 CE2 PHE L 2 200.799 239.290 268.089 1.00 32.90 C ATOM 11 CZ PHE L 2 201.803 240.201 268.300 1.00 32.90 C ATOM 12 N SER L 3 199.357 236.979 272.328 1.00 34.28 N ATOM 13 CA SER L 3 198.124 237.714 272.618 1.00 34.28 C ATOM 14 C SER L 3 197.907 238.857 271.631 1.00 34.28 C ATOM 15 O SER L 3 198.129 240.024 271.974 1.00 34.28 O ATOM 16 CB SER L 3 198.121 238.241 274.053 1.00 35.83 C ATOM 17 OG SER L 3 198.875 239.432 274.158 1.00 35.83 O ATOM 18 N PRO L 4 197.482 238.564 270.395 1.00 33.41 N ATOM 19 CA PRO L 4 197.440 239.596 269.355 1.00 33.41 C ATOM 20 C PRO L 4 196.353 240.646 269.519 1.00 33.41 C ATOM 21 O PRO L 4 196.212 241.494 268.633 1.00 33.41 O ATOM 22 CB PRO L 4 197.209 238.776 268.079 1.00 33.14 C ATOM 23 CG PRO L 4 196.474 237.597 268.538 1.00 33.14 C ATOM 24 CD PRO L 4 197.000 237.268 269.901 1.00 33.14 C TER END """)
def tst_1(prefix="tst_split_data_cif_1"): with open(prefix + '.cif', 'w') as f: f.write(sf_5r82) easy_run.fully_buffered('iotbx.split_data_cif %s.cif' % prefix) original_model = iotbx.cif.reader(input_string=sf_5r82).model() block_names = original_model.keys() for bn in block_names: assert os.path.isfile('%s.cif_%s_000.mtz' % (prefix, bn)) assert os.path.isfile('%s.cif_%s_000.cif' % (prefix, bn)) # testing output cif for bn in block_names: m = iotbx.cif.reader(file_path='%s.cif_%s_000.cif' % (prefix, bn)).model() for k in original_model[bn].keys(): assert approx_equal(m[bn][k], original_model[bn][k]) # testing output mtz for bn in block_names: mtz_obj = iotbx.mtz.object('%s.cif_%s_000.mtz' % (prefix, bn)) # print('working with', '%s.cif_%s_000.mtz' % (prefix, bn)) strio = StringIO() mtz_obj.show_summary(out=strio) # mtz_obj.show_column_data(out=strio) val = strio.getvalue() assert not show_diff(val, sf_5r82_mtz_results[bn])
def make_geo_pickle_unpickle(geometry, xrs, prefix): init_out = StringIO.StringIO() from_file_out = StringIO.StringIO() geometry.show_sorted( sites_cart=xrs.sites_cart(), site_labels=xrs.scatterers().extract_labels(), f=init_out) t0 = time() pklfile = open("%s.pkl" % prefix, 'wb') #import code, traceback; code.interact(local=locals(), banner="".join( traceback.format_stack(limit=10) ) ) pickle.dump(geometry, pklfile) pklfile.close() t1 = time() pklfile = open("%s.pkl" % prefix, 'rb') grm_from_file = pickle.load(pklfile) pklfile.close() t2 = time() print "Time pickling/unpickling: %.4f, %.4f" % (t1-t0, t2-t1) grm_from_file.show_sorted( sites_cart=xrs.sites_cart(), site_labels=xrs.scatterers().extract_labels(), f=from_file_out) # print "INITIAL" init_v = init_out.getvalue() # print init_v # print "="*50 # print "From disc" from_file_v = from_file_out.getvalue() # print from_file_v # STOP() assert not show_diff(init_v, from_file_v)
def run(args): assert args in [[], ["--forever"]] verbose = True while True: if (flex.int().as_numpy_array(optional=True) is None): try: flex.int().as_numpy_array() except RuntimeError, e: assert not show_diff(str(e), "numpy API not available") else: raise Exception_expected else: for flex_type in [ flex.bool, flex.int, flex.long, flex.float, flex.double, flex.complex_double, flex.size_t]: exercise_basic(flex_type, verbose) exercise_int() if (len(args) == 0): break verbose = False
def make_geo_pickle_unpickle(geometry, xrs, prefix): init_out = StringIO() from_file_out = StringIO() geometry.show_sorted(sites_cart=xrs.sites_cart(), site_labels=xrs.scatterers().extract_labels(), f=init_out) energy_original = geometry.energies_sites(sites_cart=xrs.sites_cart()) t0 = time() #import code, traceback; code.interact(local=locals(), banner="".join( traceback.format_stack(limit=10) ) ) pklstr = pickle.dumps(geometry) t1 = time() grm_from_file = pickle.loads(pklstr) t2 = time() # Fails here: energy_from_pickle = grm_from_file.energies_sites( sites_cart=xrs.sites_cart()) assert approx_equal(energy_original.target, energy_from_pickle.target) print("Time pickling/unpickling: %.4f, %.4f" % (t1 - t0, t2 - t1)) grm_from_file.show_sorted(sites_cart=xrs.sites_cart(), site_labels=xrs.scatterers().extract_labels(), f=from_file_out) # print "INITIAL" init_v = init_out.getvalue() # print init_v # print "="*50 # print "From disc" from_file_v = from_file_out.getvalue() # print from_file_v # STOP() assert not show_diff(init_v, from_file_v) return grm_from_file
def exercise_show_summary(): from scitbx.graph import test_cases_tardy_pdb tcs = test_cases_tardy_pdb.select_test_cases( tags_or_indices=["ZINC03847120"]) assert len(tcs) == 1 tt = tcs[0].tardy_tree_construct() for vl,cb in [(None, ("10", "11")), (list("ABCDEFGHIKLMNO"), ("L", "M"))]: sio = StringIO() assert tt.show_summary(vertex_labels=vl, out=sio, prefix="&") is tt assert not show_diff(sio.getvalue(), """\ &number of vertices: 14 &number of edges: 15 &find cluster loops: 0 repeats &number of fixed vertex lists: 0 &number of fixed vertices: 0 &number of clusters: 1 &merge clusters with multiple connections: 1 pass &number of hinge edges: 1 &number of loop edges: 0 &number of loop edge bendings: 0 &number of fixed hinges: 1 &tardy fixed hinge: %s & %s """ % cb)
def check(text): if (file_name == "intrinsics_extra.f"): check_intrinsics_extra(text) return if (file_name == "sf.f"): text = text.replace(" -0.620088", " -0.620087") elif (file_name == "unformatted_experiments.f"): if (sys.byteorder == "big"): text = text \ .replace( " 1234 5678", " 5678 1234") \ .replace( " 18558553691448", " 23330262356193") have_diffs = show_diff(text, "\n".join(info.out_lines)) def assert_not_have_diffs(): if (opts.keep_going): print "WARNING: --keep-going after show_diff:", exe_name else: assert not have_diffs if (have_diffs): if (exe_name is fem_exe_name): assert_not_have_diffs() elif (exe_name is ifort_exe_name): if ( not info.ifort_diff_behavior and not info.ifort_diff_floating_point_format): assert_not_have_diffs() else: raise AssertionError
def tst_run(args=[]): assert len(args) == 0 import libtbx.load_env import os op = os.path cbf = libtbx.env.under_dist(module_name="cbflib", path="examples/fit2d_data.cbf") assert op.isfile(cbf) from cbflib_adaptbx.command_line import dump from six.moves import cStringIO as StringIO sio = StringIO() dump.process(file_name=cbf, out=sio) from libtbx.test_utils import show_diff assert not show_diff( sio.getvalue(), """\ File name: %s Number of blocks: 1 Block name: image_1 Number of categories: 12 Category name: diffrn Category name: diffrn_source Category name: diffrn_radiation Category name: diffrn_radiation_wavelength Category name: diffrn_measurement Category name: diffrn_detector Category name: diffrn_detector_element Category name: diffrn_data_frame Category name: array_structure_list Category name: array_element_size Category name: array_intensities Category name: array_data """ % cbf) print "OK"
def exercise_001(file_name="tst_mtrix_biomt_cmdl_001.pdb"): """ Make sure SS gets populated by MTRIX """ of = open(file_name, "w") print(pdb_str_2b, file=of) of.close() easy_run.call("phenix.pdb.mtrix_reconstruction %s" % file_name) pdb_inp = iotbx.pdb.input( file_name="tst_mtrix_biomt_cmdl_001_MTRIX_expanded.pdb") a = pdb_inp.extract_secondary_structure() assert a.get_n_helices() == 3, a.get_n_helices() assert a.get_n_sheets() == 3, "%d" % a.get_n_sheets() # checking chain ids. If this part is failing, then something is changed in # chain expanding which made chain ids in hierarchy.py:join_roots() # not compatible with those used in secondary_structure.py:multiply_to_asu chain_ids = [h.start_chain_id for h in a.helices] assert chain_ids == ['A', 'C', 'E'], chain_ids # checking sheets assert not show_diff( a.as_pdb_str(), """\ HELIX 1 1 THR A 1 THR A 2 1 6 HELIX 2 2 THR C 1 THR C 2 1 6 HELIX 3 3 THR E 1 THR E 2 1 6 SHEET 1 1 2 THR A 1 THR A 3 0 SHEET 2 1 2 THR B 4 THR B 5 -1 O THR B 4 N THR A 2 SHEET 1 2 2 THR C 1 THR C 3 0 SHEET 2 2 2 THR D 4 THR D 5 -1 O THR D 4 N THR C 2 SHEET 1 3 2 THR E 1 THR E 3 0 SHEET 2 3 2 THR F 4 THR F 5 -1 O THR F 4 N THR E 2""")
def exercise_mixed_read_write(opts): tmp = "exercise_mixed_read_write.tmp" tst_f = "exercise_mixed_read_write.f" open(tst_f, "w").write("""\ program prog open( & unit=1, & file='%s', & status='old') read(1, '(i2)') num write(6, '(i2)') num*2 write(1, '(i2)') 78 end """ % tmp) for cmd in build_cmds(tst_f=tst_f, opts=opts): if (opts.verbose): print cmd open(tmp, "wb").write("""\ 12 34 56 """) stdout = easy_run.fully_buffered( command=cmd).raise_if_errors().stdout_lines assert stdout == ["24"] tmp_text = open(tmp, "rb").read() assert not show_diff(tmp_text, """\ 12 78 """)
def test_1(): regression_pdb = libtbx.env.find_in_repositories( relative_path="phenix_regression/pdb/2qxs.pdb", test=os.path.isfile) pdb_inp = iotbx.pdb.input(file_name=regression_pdb) h = pdb_inp.construct_hierarchy() stats = mmtbx.model.statistics.geometry(pdb_hierarchy=h) out = StringIO() stats.show(log=out) assert not show_diff( out.getvalue(), """ GEOMETRY RESTRAINTS LIBRARY: NONE DEVIATIONS FROM IDEAL VALUES. BOND : 0.000 0.000 0 ANGLE : 0.000 0.000 0 CHIRALITY : 0.000 0.000 0 PLANARITY : 0.000 0.000 0 DIHEDRAL : 0.000 0.000 0 MIN NONBONDED DISTANCE : 0.000 MOLPROBITY STATISTICS. ALL-ATOM CLASHSCORE : 7.78 RAMACHANDRAN PLOT: OUTLIERS : 0.68 % ALLOWED : 0.90 % FAVORED : 98.42 % ROTAMER OUTLIERS : 4.35 % CBETA DEVIATIONS : 0.87 % PEPTIDE PLANE: CIS-PROLINE : 7.14 % CIS-GENERAL : 0.00 % TWISTED PROLINE : 0.00 % TWISTED GENERAL : 0.42 % """)
def exercise(): atom_names = ["CA", "C", "C", "X", "HB1", "HB2", "HB3"] matched = interpreters["LEU"].match_atom_names(atom_names) s = StringIO() matched.show_problems(out=s, prefix=">") assert not show_diff( s.getvalue(), """\ >unexpected atom names: "X" >multiple matches: expected pattern=C names="C", "C" >mutually exclusive: 1hB 3hB """) ala_names = ["N", "CA", "C", "O", "CB"] ala_missing_names = ["CA", "CB", "O"] ala_with_h = ["N", "CA", "C", "O", "CB", "HA", "HB1", "HB2", "HB3", "H"] ala_matched = interpreters["ALA"].match_atom_names(ala_names) ala_missing = interpreters["ALA"].match_atom_names(ala_missing_names) ala_h = interpreters["ALA"].match_atom_names(ala_with_h) assert ala_matched.missing_atom_names(ignore_hydrogen=True) == set(()) assert ala_missing.missing_atom_names(ignore_hydrogen=True) == set( ("C", "N")) assert ala_h.missing_atom_names(ignore_hydrogen=False) == set(()) print "OK"
def exercise_set_seg_id () : input_pdb = """\ ATOM 1 O GLY A 3 1.434 1.460 2.496 1.00 6.04 O ATOM 2 O CYS A 7 2.196 4.467 3.911 1.00 4.51 O ATOM 3 O CYS A 1 -1.433 4.734 5.405 1.00 7.85 O TER ATOM 4 O SER B 4 0.297 0.843 7.226 1.00 7.65 O ATOM 5 OG ASER B 4 -2.625 1.057 4.064 0.50 5.46 O ATOM 6 OG BSER B 4 -0.885 0.189 3.843 0.50 11.74 O TER ATOM 7 O LEU 0 5.613 12.448 6.864 1.00 7.32 O TER END """ open("tmp_seg_id.pdb", "w").write(input_pdb) easy_run.call("phenix.pdbtools tmp_seg_id.pdb set_seg_id_to_chain_id=True --quiet") pdb_out_1 = open("tmp_seg_id.pdb_modified.pdb").read() assert not show_diff(pdb_out_1, """\ ATOM 1 O GLY A 3 1.434 1.460 2.496 1.00 6.04 A O ATOM 2 O CYS A 7 2.196 4.467 3.911 1.00 4.51 A O ATOM 3 O CYS A 1 -1.433 4.734 5.405 1.00 7.85 A O TER ATOM 4 O SER B 4 0.297 0.843 7.226 1.00 7.65 B O ATOM 5 OG ASER B 4 -2.625 1.057 4.064 0.50 5.46 B O ATOM 6 OG BSER B 4 -0.885 0.189 3.843 0.50 11.74 B O TER ATOM 7 O LEU 0 5.613 12.448 6.864 1.00 7.32 O TER END """) easy_run.call("phenix.pdbtools tmp_seg_id.pdb_modified.pdb clear_seg_id=True --quiet") pdb_out_2 = open("tmp_seg_id.pdb_modified.pdb_modified.pdb").read() assert (pdb_out_2 == input_pdb)
def exercise_input_pickling(): pdb_inp = pdb.pdb_input(source_info="file/name", lines=pdb_string_all_sections) s = pickle.dumps(pdb_inp, 1) l = pickle.loads(s) assert not show_diff(l.as_pdb_string(), pdb_inp.as_pdb_string()) assert l.source_info() == "pickle" for section in pdb.input_sections: assert not show_diff("\n".join(getattr(l, section)()), "\n".join( getattr(pdb_inp, section)())) s = "\n".join(l.__getinitargs__()[1]) d = hashlib.md5(s).hexdigest() if (pdb.hierarchy.atom.has_siguij()): assert d == "bf987c40cc8672e2f2324d91d6de3e2b" else: assert d == "7375e96fd52794a785284580730de20c"
def exercise_mixed_read_write(opts): tmp = "exercise_mixed_read_write.tmp" tst_f = "exercise_mixed_read_write.f" open(tst_f, "w").write("""\ program prog open( & unit=1, & file='%s', & status='old') read(1, '(i2)') num write(6, '(i2)') num*2 write(1, '(i2)') 78 end """ % tmp) for cmd in build_cmds(tst_f=tst_f, opts=opts): if (opts.verbose): print(cmd) open(tmp, "w").write("""\ 12 34 56 """) stdout = easy_run.fully_buffered( command=cmd).raise_if_errors().stdout_lines assert stdout == ["24"] tmp_text = open(tmp, "rb").read() assert not show_diff( tmp_text, b"""\ 12 78 """.replace(b"\n", os.linesep.encode("latin-1")))
def run_and_check(cmd, pdbs_file, expected_out): abs_paths = [] for rel_path in open(op.join(dd, pdbs_file)).read().splitlines(): abs_paths.append(op.normpath(op.join(dd, rel_path))) list_of_pdbs = "list_of_pdbs_%d" % run_serial.next() print >> open(list_of_pdbs, "w"), "\n".join(abs_paths) cmd = cmd % list_of_pdbs if (valgrind): cmd = "valgrind " + cmd print cmd remove_files("out") if (not valgrind): buffers = easy_run.fully_buffered(command=cmd).raise_if_errors() if (expected_out is None): return buffers.stdout_lines else: easy_run.call(command=cmd) if (expected_out is None): return None filtered_lines = [] for line in open("out").read().splitlines(): sw = line.startswith if (sw("pole") or sw("members") or sw("min: ") or sw("max: ")): filtered_lines.append(line) assert not show_diff("\n".join(filtered_lines)+"\n", expected_out) print "OK"
def exercise_make_atom_with_labels(): awl = pdb.make_atom_with_labels() assert not show_diff( awl.format_atom_record_group(siguij=False), """\ ATOM 0.000 0.000 0.000 0.00 0.00""", ) awl = pdb.make_atom_with_labels( xyz=(1, 2, 3), sigxyz=(4, 5, 6), occ=7, sigocc=8, b=9, sigb=10, uij=(11, 12, 13, 14, 15, 16), siguij=(17, 18, 19, 20, 21, 22), hetero=True, serial="ABCDE", name="FGHI", segid="JKLM", element="NO", charge="PQ", model_id="RSTU", chain_id="VW", resseq="XYZa", icode="b", altloc="c", resname="def", ) expected = """\ HETATMABCDE FGHIcdefVWXYZab 1.000 2.000 3.000 7.00 9.00 JKLMNOPQ SIGATMABCDE FGHIcdefVWXYZab 4.000 5.000 6.000 8.00 10.00 JKLMNOPQ ANISOUABCDE FGHIcdefVWXYZab 110000 120000 130000 140000 150000 160000 JKLMNOPQ """[ :-1 ] assert not show_diff(awl.format_atom_record_group(siguij=False), expected) if pdb.hierarchy.atom.has_siguij(): assert not show_diff( awl.format_atom_record_group(), expected + """ SIGUIJABCDE FGHIcdefVWXYZab 170000 180000 190000 200000 210000 220000 JKLMNOPQ """[ :-1 ], )
def exercise_strip_spaces_separate_strings(): from fable.read import Error, source_line, strip_spaces_separate_strings import itertools global_line_index_generator = itertools.count() def slc(cmbnd): return [ source_line( global_line_index_generator=global_line_index_generator, file_name="str", line_number=i+1, text=" "+line) for i,line in enumerate(cmbnd.splitlines())] def check(cmbnd, expected_code, expected_strings): for cmbnd_work,expected_strings_work in [ (cmbnd, expected_strings), (cmbnd.replace("'", '"'), [s.replace("'", '"') for s in expected_strings])]: ssl = strip_spaces_separate_strings(source_line_cluster=slc(cmbnd_work)) assert not show_diff(ssl.code, expected_code) assert len(ssl.strings) == len(ssl.string_indices) assert ssl.strings == expected_strings_work expected_string_indices = [] for i,c in enumerate(expected_code): if (c == "'" or c == '"'): expected_string_indices.append(i) assert ssl.string_indices == expected_string_indices if (cmbnd.find('"') > 0): break check("a = 0", "a=0", []) check("a = ''", "a='", [""]) check("a = '\"'", "a='", ["\""]) check("a = \"'\"", "a='", ["'"]) check("a = 'x'", "a='", ["x"]) check("a = ' x ' ", "a='", [" x "]) check("a = ''''", "a='", ["'"]) check("call foo('')", "callfoo(')", [""]) check("call foo('''')", "callfoo(')", ["'"]) check("c a l l f o o ( '''' ) ", "callfoo(')", ["'"]) check(" c a l l f o o ( '''' ) ", "callfoo(')", ["'"]) check(" C A L LF O O ( ' abc '''' def ''' , ' g''h''i''' , X ) ", "callfoo(',',x)", [" abc '' def '", " g'h'i'"]) check("a = '\n'", "a='", [""]) check("a = 'x\n'", "a='", ["x"]) check("a = '\ny'", "a='", ["y"]) check("a = 'x\ny'", "a='", ["xy"]) check("a = '''\n'", "a='", ["'"]) check("a = '\n'''", "a='", ["'"]) check("a = '''\n'''", "a='", ["''"]) # for cmbnd,q,nd in [("'abc", "'", 9), ('x="', '"', 11)]: try: strip_spaces_separate_strings(source_line_cluster=slc(cmbnd)) except Error, e: assert not show_diff(str(e), """\ Missing terminating %s character: at str(1): | %s| %s^""" % (q, cmbnd, "-"*nd)) else: raise Exception_expected
def exercise_input_pickling(): pdb_inp = pdb.pdb_input(source_info="file/name", lines=pdb_string_all_sections) for p in [pickle, cPickle]: s = p.dumps(pdb_inp, 1) l = p.loads(s) assert not show_diff(l.as_pdb_string(), pdb_inp.as_pdb_string()) assert l.source_info() == "pickle" for section in pdb.input_sections: assert not show_diff( "\n".join(getattr(l, section)()), "\n".join(getattr(pdb_inp, section)())) s = "\n".join(l.__getinitargs__()[1]) d = hashlib_md5(s).hexdigest() if (pdb.hierarchy.atom.has_siguij()): assert d == "bf987c40cc8672e2f2324d91d6de3e2b" else: assert d == "7375e96fd52794a785284580730de20c"
def run_and_compare_sdrive_fem(this_script): sdrive_fem = libtbx.env.under_build(path="scitbx/lbfgs/sdrive_fem") if (not os.path.isfile(sdrive_fem)): return outputs = [] for cmd in [sdrive_fem, 'scitbx.python "%s" fortran 100 5 1 0' % this_script]: outputs.append(run_cmd(cmd=cmd)) assert not show_diff(outputs[0], outputs[1])
def exercise_weighting_schemes(): unit_weighting = least_squares.unit_weighting() assert unit_weighting.type() == "unit" assert str(unit_weighting) == "w=1" shelx_weighting = least_squares.mainstream_shelx_weighting(0.1234, 0.5678) assert shelx_weighting.type() == "calc" assert not show_diff( str(shelx_weighting), "w=1/[\s^2^(Fo^2^)+(0.1234P)^2^+0.5678P] where P=(Fo^2^+2Fc^2^)/3")
def exercise(space_group_info, anomalous_flag, d_min=1.0, reflections_per_bin=200, n_bins=10, verbose=0): elements = ("N", "C", "C", "O") * 5 structure_factors = random_structure.xray_structure( space_group_info, elements=elements, volume_per_atom=50., min_distance=1.5, general_positions_only=True, use_u_aniso=False, u_iso=adptbx.b_as_u(10) ).structure_factors( anomalous_flag=anomalous_flag, d_min=d_min, algorithm="direct") if (0 or verbose): structure_factors.xray_structure().show_summary() asu_contents = dicts.with_default_value(0) for elem in elements: asu_contents[elem] += 1 f_calc = abs(structure_factors.f_calc()) f_calc.setup_binner( auto_binning=True, reflections_per_bin=reflections_per_bin, n_bins=n_bins) if (0 or verbose): f_calc.binner().show_summary() for k_given in [1,0.1,0.01,10,100]: f_obs = miller.array( miller_set=f_calc, data=f_calc.data()*k_given).set_observation_type_xray_amplitude() f_obs.use_binner_of(f_calc) wp = statistics.wilson_plot(f_obs, asu_contents, e_statistics=True) if (0 or verbose): print "wilson_k, wilson_b:", wp.wilson_k, wp.wilson_b print "space group:", space_group_info.group().type().hall_symbol() print "<E^2-1>:", wp.mean_e_sq_minus_1 assert 0.8 < wp.wilson_k/k_given < 1.2 assert 0.64 < wp.wilson_intensity_scale_factor/(k_given*k_given) < 1.44 assert 9 < wp.wilson_b < 11 assert wp.xy_plot_info().fit_correlation == wp.fit_correlation if space_group_info.group().is_centric(): assert 0.90 < wp.mean_e_sq_minus_1 < 1.16 assert 3.15 < wp.percent_e_sq_gt_2 < 6.5 else: assert 0.65 < wp.mean_e_sq_minus_1 < 0.90 assert 1.0 < wp.percent_e_sq_gt_2 < 3.15 assert wp.normalised_f_obs.size() == f_obs.size() f_obs = f_calc.array(data=flex.double(f_calc.indices().size(), 0)) f_obs.use_binner_of(f_calc) n_bins = f_obs.binner().n_bins_used() try: statistics.wilson_plot(f_obs, asu_contents) except RuntimeError, e: assert not show_diff(str(e), """\ wilson_plot error: %d empty bins: Number of bins: %d Number of f_obs > 0: 0 Number of f_obs <= 0: %d""" % (n_bins, n_bins, f_obs.indices().size()))
def exercise_01(): import libtbx.load_env if not libtbx.env.has_module("reduce"): print "Reduce not installed, needed for model.add_hydrogens(). skipping" return pdb_file_name = "add_h_to_hoh.pdb" tmp_f = open(pdb_file_name, "w") tmp_f.write(input_model) tmp_f.close() processed_pdb_files_srv = mmtbx.utils.process_pdb_file_srv(log=StringIO()) processed_pdb_file, pdb_inp = processed_pdb_files_srv.process_pdb_files(pdb_file_names=[pdb_file_name]) xray_structure = processed_pdb_file.xray_structure() # geometry = processed_pdb_file.geometry_restraints_manager(show_energies=False, assume_hydrogens_all_missing=True) restraints_manager = mmtbx.restraints.manager(geometry=geometry) # model = mmtbx.model.manager( refinement_flags=None, processed_pdb_files_srv=processed_pdb_files_srv, restraints_manager=restraints_manager, xray_structure=xray_structure, pdb_hierarchy=processed_pdb_file.all_chain_proxies.pdb_hierarchy, log=None, ) #### model.add_hydrogens(correct_special_position_tolerance=1.0) result = StringIO() model.write_pdb_file(out=result) result = result.getvalue().splitlines() #### result1 = [] for r1 in result: if r1.startswith("ATOM") or r1.startswith("HETATM"): result1.append(r1) result2 = [] for r2 in expected_result.splitlines(): if r2.startswith("ATOM") or r2.startswith("HETATM"): result2.append(r2) assert len(result1) == len(result2) for r1, r2 in zip(result1, result2): r1 = r1[:30] + r1[60:] r2 = r2[:30] + r2[60:] assert not show_diff(r1, r2) #### cntr = 0 xrs1 = iotbx.pdb.pdb_input( source_info=None, lines=flex.std_string(expected_result.splitlines()) ).xray_structure_simple() xrs2 = iotbx.pdb.pdb_input(source_info=None, lines=flex.std_string(result)).xray_structure_simple() for s1, s2 in zip(xrs1.scatterers(), xrs2.scatterers()): if s1.element_symbol().strip() not in ["H", "D"]: assert s1.element_symbol().strip() == s2.element_symbol().strip() assert approx_equal(s1.site, s2.site) cntr += 1 assert cntr == 19
def recycle(expected_column_data):
  # Nested helper: `ma`, `verbose`, `only_element` and
  # `assert_equal_data_and_sigmas` come from the enclosing test scope.
  mtz_obj = ma.as_mtz_dataset(column_root_label="X").mtz_object()
  sio = StringIO()
  mtz_obj.show_column_data_human_readable(out=sio)
  from libtbx.test_utils import show_diff
  if (verbose): sys.stdout.write(sio.getvalue())
  assert not show_diff(sio.getvalue(), expected_column_data)
  # Round trip: re-read the MTZ object as miller arrays and check that data
  # and sigmas survive unchanged.
  ma_2 = only_element(mtz_obj.as_miller_arrays())
  assert_equal_data_and_sigmas(ma, ma_2)