def exercise_pdb_input_error_handling(): from libtbx.test_utils import open_tmp_file from libtbx.test_utils import Exception_expected bad_pdb_string = """\ ATOM 1 CB LYS 109 16.113 7.345 47.084 1.00 20.00 A ATOM 2 CG LYS 109 17.058 6.315 47.703 1.00 20.00 A ATOM 3 CB LYS 109 26.721 1.908 15.275 1.00 20.00 B ATOM 4 CG LYS 109 27.664 2.793 16.091 1.00 20.00 B """ f = open_tmp_file(suffix="bad.pdb") f.write(bad_pdb_string) f.close() try: pdb_inp = pdb.input(file_name=f.name, source_info=None) except ValueError as e: err_message = str(e) assert bad_pdb_string.splitlines()[0] in err_message else: raise Exception_expected bad_cif_loop_string = """\ data_cif loop_ _atom_site.group_PDB _atom_site.id _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.auth_asym_id _atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.B_iso_or_equiv _atom_site.type_symbol _atom_site.pdbx_formal_charge _atom_site.label_asym_id _atom_site.label_entity_id _atom_site.label_seq_id #_atom_site.pdbx_PDB_model_num ATOM 47 CA . THR C 22 ? -7.12300 19.28700 -2.26800 1.000 8.32783 C ? B ? 11 1 ATOM 52 CA . ASN C 25 ? -11.06500 18.97000 -5.48100 1.000 8.20531 C ? C ? 12 1 ATOM 60 CA . VAL C 26 ? -12.16900 22.54800 -4.78000 1.000 8.45988 C ? C ? 13 1 """ f = open_tmp_file(suffix="bad.cif") f.write(bad_cif_loop_string) f.close() try: pdb_inp = pdb.input(file_name=f.name, source_info=None) except iotbx.cif.CifParserError as e: err_message = str(e) assert err_message == \ "Wrong number of data items for loop containing _atom_site.group_PDB" else: raise Exception_expected
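# A minimal sketch (editor's addition) of the error-handling contract exercised
# above: iotbx.pdb.input() raises ValueError for malformed ATOM records, and the
# offending input line appears in the exception text.  It reuses the first
# records of bad_pdb_string above and assumes a working iotbx installation.
def _example_pdb_input_value_error():
  import iotbx.pdb
  from libtbx.test_utils import Exception_expected
  bad_pdb_string = """\
ATOM 1 CB LYS 109 16.113 7.345 47.084 1.00 20.00 A
ATOM 2 CG LYS 109 17.058 6.315 47.703 1.00 20.00 A
"""
  try:
    iotbx.pdb.input(source_info=None, lines=bad_pdb_string)
  except ValueError as e:
    # the first offending record is echoed back in the error message
    assert bad_pdb_string.splitlines()[0] in str(e)
  else:
    raise Exception_expected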
def exercise_spots_xds(): txt = """\ 1104.20 1290.27 2.20 632. -4 -3 -1 912.22 1303.37 3.49 346. 0 0 0 1427.55 1339.93 3.34 259. 7 3 -1 1380.54 1187.54 3.58 243. 4 3 4 1222.32 1220.09 3.95 241. -1 0 2 1491.71 1322.33 2.72 237. 9 4 0 1053.50 1227.71 2.87 221. -6 -4 1 """ f = open_tmp_file(suffix="SPOTS.XDS", mode="wb") f.write(txt) f.close() spots_in = spot_xds.reader() spots_in.read_file(f.name) assert approx_equal( spots_in.centroid, [[1104.2, 1290.27, 2.2], [912.22, 1303.37, 3.49], [1427.55, 1339.93, 3.34], [1380.54, 1187.54, 3.58], [1222.32, 1220.09, 3.95], [1491.71, 1322.33, 2.72], [1053.5, 1227.71, 2.87]]) assert approx_equal( spots_in.intensity, [632.0, 346.0, 259.0, 243.0, 241.0, 237.0, 221.0]) assert approx_equal( spots_in.miller_index, [[-4, -3, -1], [0, 0, 0], [7, 3, -1], [4, 3, 4], [-1, 0, 2], [9, 4, 0], [-6, -4, 1]]) spots_out = spot_xds.writer( spots_in.centroid, spots_in.intensity, spots_in.miller_index) f = open_tmp_file(suffix="SPOTS.XDS", mode="wb") f.close() spots_out.write_file(filename=f.name) spots_in = spot_xds.reader() spots_in.read_file(f.name) assert approx_equal(spots_in.centroid, spots_out.centroids) assert approx_equal(spots_in.intensity, spots_out.intensities) assert approx_equal(spots_in.miller_index, spots_out.miller_indices) # now without miller indices spots_out = spot_xds.writer(spots_in.centroid, spots_in.intensity) f = open_tmp_file(suffix="SPOTS.XDS", mode="wb") f.close() spots_out.write_file(filename=f.name) spots_in = spot_xds.reader() spots_in.read_file(f.name) assert approx_equal(spots_in.centroid, spots_out.centroids) assert approx_equal(spots_in.intensity, spots_out.intensities) assert len(spots_in.miller_index) == 0
def exercise_writer(): from iotbx import csv_utils x = (1, 2, 3, 4, 5) y = (6, 7, 8, 9, 10) f = open_tmp_file() field_names = ('x', 'y') csv_utils.writer(f, (x, y), field_names=field_names) f.close() f = open(f.name, 'r') content = [l.strip() for l in f.readlines()] text = ['x,y'] text += ['%s,%s' % (row[0], row[1]) for row in zip(x, y)] assert content == text f.close() x = (1, 2, 3, 4, 5) y = (6, 7, 8, 9, 10) f = open_tmp_file() csv_utils.writer(f, (x, y), delimiter=';') f.close() f = open(f.name, 'r') content = [l.strip() for l in f.readlines()] text = ['%s;%s' % (row[0], row[1]) for row in zip(x, y)] assert content == text f.close() x = flex.int(x) y = flex.int(y) f = open_tmp_file() csv_utils.writer(f, (x, y), field_names=field_names) f.close() f = open(f.name, 'r') content = [l.strip() for l in f.readlines()] text = ['x,y'] text += ['%s,%s' % (row[0], row[1]) for row in zip(x, y)] assert content == text f.close() y.append(11) f = open_tmp_file() try: csv_utils.writer(f, (x, y), field_names=field_names) except AssertionError: pass else: raise Exception_expected f.close()
def exercise_spots_xds(): txt = """\ 1104.20 1290.27 2.20 632. -4 -3 -1 912.22 1303.37 3.49 346. 0 0 0 1427.55 1339.93 3.34 259. 7 3 -1 1380.54 1187.54 3.58 243. 4 3 4 1222.32 1220.09 3.95 241. -1 0 2 1491.71 1322.33 2.72 237. 9 4 0 1053.50 1227.71 2.87 221. -6 -4 1 """ f = open_tmp_file(suffix="SPOTS.XDS", mode="wb") f.write(txt) f.close() spots_in = spot_xds.reader() spots_in.read_file(f.name) assert approx_equal(spots_in.centroid, [[1104.2, 1290.27, 2.2], [912.22, 1303.37, 3.49], [1427.55, 1339.93, 3.34], [1380.54, 1187.54, 3.58], [1222.32, 1220.09, 3.95], [1491.71, 1322.33, 2.72], [1053.5, 1227.71, 2.87]]) assert approx_equal(spots_in.intensity, [632.0, 346.0, 259.0, 243.0, 241.0, 237.0, 221.0]) assert approx_equal(spots_in.miller_index, [[-4, -3, -1], [0, 0, 0], [7, 3, -1], [4, 3, 4], [-1, 0, 2], [9, 4, 0], [-6, -4, 1]]) spots_out = spot_xds.writer(spots_in.centroid, spots_in.intensity, spots_in.miller_index) f = open_tmp_file(suffix="SPOTS.XDS", mode="wb") f.close() spots_out.write_file(filename=f.name) spots_in = spot_xds.reader() spots_in.read_file(f.name) assert approx_equal(spots_in.centroid, spots_out.centroids) assert approx_equal(spots_in.intensity, spots_out.intensities) assert approx_equal(spots_in.miller_index, spots_out.miller_indices) # now without miller indices spots_out = spot_xds.writer(spots_in.centroid, spots_in.intensity) f = open_tmp_file(suffix="SPOTS.XDS", mode="wb") f.close() spots_out.write_file(filename=f.name) spots_in = spot_xds.reader() spots_in.read_file(f.name) assert approx_equal(spots_in.centroid, spots_out.centroids) assert approx_equal(spots_in.intensity, spots_out.intensities) assert len(spots_in.miller_index) == 0
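# Sketch (editor's addition) of the SPOT.XDS round trip exercised twice above:
# spot_xds.writer takes centroids, intensities and (optionally) Miller indices,
# and spot_xds.reader recovers them from the written file.  Assumes spot_xds is
# iotbx.xds.spot_xds, as in the surrounding tests; plain Python lists are used
# here for brevity.
def _example_spot_xds_roundtrip(file_name="SPOT.XDS"):
  from iotbx.xds import spot_xds
  centroids = [[1104.20, 1290.27, 2.20], [912.22, 1303.37, 3.49]]
  intensities = [632.0, 346.0]
  miller_indices = [(-4, -3, -1), (0, 0, 0)]
  spot_xds.writer(centroids, intensities, miller_indices).write_file(
    filename=file_name)
  spots = spot_xds.reader()
  spots.read_file(file_name)
  return spots.centroid, spots.intensity, spots.miller_index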
def exercise_density_modification(): cci_structure_lib = os.environ.get("CCI_STRUCTURE_LIB") if cci_structure_lib is None: print "Skipping exercise_density_modification(): $CCI_STRUCTURE_LIB is not set" return rnase_s_path = os.path.join(cci_structure_lib, "rnase-s") from libtbx.test_utils import open_tmp_file tmp_file = open_tmp_file(suffix=".params") tmp_file.write(params % (rnase_s_path, rnase_s_path)) tmp_file.close() #dm = density_modification.run( #args=[tmp_file.name, "%s/model/1RGE.pdb"%rnase_s_path]) args = ( tmp_file.name, op.join(rnase_s_path, "model", "1RGE.pdb")) for arg in args: assert arg.find('"') < 0 cmd = 'mmtbx.density_modification "%s" "%s"' % args print cmd result = easy_run.fully_buffered(command=cmd).raise_if_errors() assert result.stdout_lines[-5].startswith('Starting dm/model correlation:') assert result.stdout_lines[-4].startswith('Final dm/model correlation:') assert approx_equal(float(result.stdout_lines[-5].split()[-1]), 0.59, 0.01) assert approx_equal(float(result.stdout_lines[-4].split()[-1]), 0.80, 0.01)
def exercise_compilation(): ucif_dist = libtbx.env.dist_path(module_name="ucif") antlr3_dist = libtbx.env.under_dist("ucif", "antlr3") os.environ["LIBTBX_UCIF"] = ucif_dist os.environ["LIBTBX_ANTLR3"] = antlr3_dist assert ucif_dist.find('"') < 0 if sys.platform == "win32": cmd = '"%s/examples/build_cif_parser.bat"' %ucif_dist ext = ".exe" else: cmd = '. "%s/examples/build_cif_parser.sh"' %ucif_dist ext = "" result = easy_run.fully_buffered(cmd) if result.return_code: if len(result.stderr_lines) > 0: raise RuntimeError, result.show_stderr() raise RuntimeError, result.show_stdout() assert os.path.exists("cif_parser"+ext) f = open_tmp_file(suffix=".cif") f.write(cif_string) f.close() cmd = 'cif_parser "%s"' %f.name cmd = os.path.join(".", cmd) r = easy_run.fully_buffered(cmd).raise_if_errors() assert r.stdout_lines[0].startswith("Congratulations!")
def exercise_compilation(): ucif_dist = libtbx.env.dist_path(module_name="ucif") antlr3_dist = libtbx.env.under_dist("ucif", "antlr3") os.environ["LIBTBX_UCIF"] = ucif_dist os.environ["LIBTBX_ANTLR3"] = antlr3_dist assert ucif_dist.find('"') < 0 if sys.platform == "win32": cmd = '"%s/examples/build_cif_parser.bat"' % ucif_dist ext = ".exe" else: cmd = '"%s/examples/build_cif_parser.sh"' % ucif_dist ext = "" result = easy_run.fully_buffered(cmd) if result.return_code: if len(result.stderr_lines) > 0: raise RuntimeError, result.show_stderr() raise RuntimeError, result.show_stdout() assert os.path.exists("cif_parser" + ext) f = open_tmp_file(suffix=".cif") f.write(cif_string) f.close() cmd = 'cif_parser "%s"' % f.name cmd = os.path.join(".", cmd) r = easy_run.fully_buffered(cmd).raise_if_errors() assert r.stdout_lines[0].startswith("Congratulations!")
def exercise_writer(): from iotbx import csv_utils x = (1,2,3,4,5) y = (6,7,8,9,10) f = open_tmp_file() field_names = ('x','y') csv_utils.writer(f, (x,y), field_names=field_names) f.close() f = open(f.name, 'r') content = f.readlines() text = ['x,y\r\n'] text += ['%s,%s\r\n' %(row[0],row[1]) for row in zip(x,y)] assert content == text f.close() x = (1,2,3,4,5) y = (6,7,8,9,10) f = open_tmp_file() csv_utils.writer(f, (x,y), delimiter=';') f.close() f = open(f.name, 'r') content = f.readlines() text = ['%s;%s\r\n' %(row[0],row[1]) for row in zip(x,y)] assert content == text x = flex.int(x) y = flex.int(y) f = open_tmp_file() csv_utils.writer(f, (x,y), field_names=field_names) f.close() f = open(f.name, 'r') content = f.readlines() text = ['x,y\r\n'] text += ['%s,%s\r\n' %(row[0],row[1]) for row in zip(x,y)] assert content == text f.close() y.append(11) f = open_tmp_file() try: csv_utils.writer(f, (x,y), field_names=field_names) except AssertionError: pass else: raise Exception_expected f.close()
def exercise_reader(): from iotbx import csv_utils x = (1,2,3,4,5) y = (6,7,8,9,10) f = open_tmp_file() field_names = ('x','y') csv_utils.writer(f, (x,y), field_names=field_names,delimiter=';') f.close() f = open(f.name, 'r') a = csv_utils.reader(f, data_type=int, field_names=True,delimiter=';') f.close() assert tuple(a.data[0]) == x assert tuple(a.data[1]) == y x = (1,2,3,4,5) y = (1.1,2.2,3.3,4.4,5.5) f = open_tmp_file() csv_utils.writer(f, (x,y)) f.close() f = open(f.name, 'r') data_type_list = (int, float) a = csv_utils.reader(f, data_type_list=data_type_list) f.close() assert tuple(a.data[0]) == x assert tuple(a.data[1]) == y f = open(f.name, 'r') data_type_list = (int, float) try: a = csv_utils.reader(f, data_type=int, data_type_list=data_type_list) # Can't pass data_type AND data_type_list except AssertionError: pass else: raise Exception_expected f.close() f = open(f.name, 'r') a = csv_utils.reader(f) f.close() assert list(a.data[0]) == [str(i) for i in x] assert list(a.data[1]) == [str(i) for i in y]
def exercise_crystal_symmetry():
  cm = cif.reader(input_string=p1_sym_ops).model()
  cs_builder = cif.builders.crystal_symmetry_builder(cm["r1e5xsf"])
  assert cs_builder.crystal_symmetry.space_group_info().symbol_and_number() \
    == 'P 1 (No. 1)'
  file_object = open_tmp_file(suffix=".cif")
  file_object.write(p1_sym_ops)
  file_object.close()
  cs = crystal_symmetry_from_any.extract_from(file_name=file_object.name)
  assert cs.space_group_info().symbol_and_number() == 'P 1 (No. 1)'
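# Sketch (editor's addition): extracting crystal symmetry from a small CIF with
# iotbx's crystal_symmetry_from_any, the entry point used in
# exercise_crystal_symmetry above.  The header items mirror the mmCIF snippets
# used elsewhere in this file; open_tmp_file comes from libtbx.test_utils as in
# the surrounding tests.
def _example_crystal_symmetry_from_cif():
  from libtbx.test_utils import open_tmp_file
  from iotbx import crystal_symmetry_from_any
  cif_text = """\
data_example
_symmetry.space_group_name_H-M 'C 1 2 1'
_cell.length_a 46.053
_cell.length_b 9.561
_cell.length_c 20.871
_cell.angle_alpha 90.0
_cell.angle_beta 97.43
_cell.angle_gamma 90.0
"""
  f = open_tmp_file(suffix=".cif")
  f.write(cif_text)
  f.close()
  cs = crystal_symmetry_from_any.extract_from(file_name=f.name)
  if cs is not None:
    print(cs.space_group_info(), cs.unit_cell())
  return cs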
def exercise_reader(): from iotbx import csv_utils x = (1, 2, 3, 4, 5) y = (6, 7, 8, 9, 10) f = open_tmp_file() field_names = ('x', 'y') csv_utils.writer(f, (x, y), field_names=field_names, delimiter=';') f.close() f = open(f.name, 'r') a = csv_utils.reader(f, data_type=int, field_names=True, delimiter=';') f.close() assert tuple(a.data[0]) == x assert tuple(a.data[1]) == y x = (1, 2, 3, 4, 5) y = (1.1, 2.2, 3.3, 4.4, 5.5) f = open_tmp_file() csv_utils.writer(f, (x, y)) f.close() f = open(f.name, 'r') data_type_list = (int, float) a = csv_utils.reader(f, data_type_list=data_type_list) f.close() assert tuple(a.data[0]) == x assert tuple(a.data[1]) == y f = open(f.name, 'r') data_type_list = (int, float) try: a = csv_utils.reader(f, data_type=int, data_type_list=data_type_list) # Can't pass data_type AND data_type_list except AssertionError: pass else: raise Exception_expected f.close() f = open(f.name, 'r') a = csv_utils.reader(f) f.close() assert list(a.data[0]) == [str(i) for i in x] assert list(a.data[1]) == [str(i) for i in y]
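# Sketch (editor's addition) of the csv_utils API that exercise_writer and
# exercise_reader cover: writer() takes a file object and an iterable of
# columns, reader() converts the text back using data_type (or data_type_list)
# and consumes the header row when field_names=True.  Same calls as above,
# assuming iotbx.csv_utils.
def _example_csv_utils_roundtrip():
  from libtbx.test_utils import open_tmp_file
  from iotbx import csv_utils
  x = (1, 2, 3)
  y = (4, 5, 6)
  f = open_tmp_file(suffix=".csv")
  csv_utils.writer(f, (x, y), field_names=('x', 'y'), delimiter=';')
  f.close()
  f = open(f.name, 'r')
  table = csv_utils.reader(f, data_type=int, field_names=True, delimiter=';')
  f.close()
  assert tuple(table.data[0]) == x
  assert tuple(table.data[1]) == y
  return table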
def exercise_mmcif(): input_4edr = """\ data_4EDR _cell.length_a 150.582 _cell.length_b 150.582 _cell.length_c 38.633 _cell.angle_alpha 90.000 _cell.angle_beta 90.000 _cell.angle_gamma 120.000 # _symmetry.space_group_name_H-M 'P 61' # loop_ _atom_site.group_PDB _atom_site.id _atom_site.type_symbol _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.label_asym_id _atom_site.label_entity_id _atom_site.label_seq_id _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.B_iso_or_equiv _atom_site.auth_seq_id _atom_site.auth_comp_id _atom_site.auth_asym_id _atom_site.auth_atom_id _atom_site.pdbx_PDB_model_num ATOM 1 N N . SER A 1 1 21.138 -69.073 17.360 1.00 23.68 108 SER A N 1 ATOM 2 C CA . SER A 1 1 22.164 -68.793 18.358 1.00 22.98 108 SER A CA 1 ATOM 3 C C . SER A 1 1 23.173 -67.799 17.805 1.00 21.13 108 SER A C 1 ATOM 4 O O . SER A 1 1 23.251 -67.594 16.595 1.00 19.34 108 SER A O 1 ATOM 5 C CB . SER A 1 1 22.882 -70.080 18.766 1.00 22.68 108 SER A CB 1 ATOM 6 O OG . SER A 1 1 23.683 -70.569 17.703 1.00 24.00 108 SER A OG 1 HETATM 2650 MN MN . MN F 4 . 9.296 -44.783 -6.320 1.00 44.18 505 MN A MN 1 """ f = open_tmp_file(suffix="cif", mode="w") print >> f, input_4edr f.close() mmcif = any_file(f.name) mmcif.assert_file_type("pdb") mmcif.check_file_type("pdb") symm = mmcif.crystal_symmetry() assert (str(symm.space_group_info()) == "P 61") assert (str(symm.unit_cell()) == "(150.582, 150.582, 38.633, 90, 90, 120)") assert mmcif.file_description == 'Model' hierarchy = mmcif.file_object.hierarchy assert len(hierarchy.atoms()) == 7 remarks = mmcif.file_object.input.remark_section() assert (len(remarks) == 0)
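# Sketch (editor's addition): the any_file entry point used in exercise_mmcif
# above, applied to a trimmed copy of the same 4EDR header with a single atom
# row.  any_file is assumed to live in iotbx.file_reader (the import is not
# shown in the test).
def _example_any_file_mmcif():
  from libtbx.test_utils import open_tmp_file
  from iotbx.file_reader import any_file
  mini_4edr = """\
data_4EDR
_cell.length_a 150.582
_cell.length_b 150.582
_cell.length_c 38.633
_cell.angle_alpha 90.000
_cell.angle_beta 90.000
_cell.angle_gamma 120.000
_symmetry.space_group_name_H-M 'P 61'
loop_
_atom_site.group_PDB
_atom_site.id
_atom_site.type_symbol
_atom_site.label_atom_id
_atom_site.label_alt_id
_atom_site.label_comp_id
_atom_site.label_asym_id
_atom_site.label_entity_id
_atom_site.label_seq_id
_atom_site.Cartn_x
_atom_site.Cartn_y
_atom_site.Cartn_z
_atom_site.occupancy
_atom_site.B_iso_or_equiv
_atom_site.auth_seq_id
_atom_site.auth_comp_id
_atom_site.auth_asym_id
_atom_site.auth_atom_id
_atom_site.pdbx_PDB_model_num
ATOM 1 N N . SER A 1 1 21.138 -69.073 17.360 1.00 23.68 108 SER A N 1
"""
  f = open_tmp_file(suffix=".cif", mode="w")
  f.write(mini_4edr)
  f.close()
  mmcif = any_file(f.name)
  mmcif.assert_file_type("pdb")  # mmCIF models are reported as "pdb"
  symm = mmcif.crystal_symmetry()
  assert str(symm.space_group_info()) == "P 61"
  return mmcif.file_object.hierarchy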
def exercise_to_xds(): if not libtbx.env.has_module("dials"): print "Skipping test: dials not present" return if not libtbx.env.has_module("dials_regression"): print "Skipping exercise_to_xds(): dials_regression not present" return data_dir = libtbx.env.find_in_repositories( relative_path="dials_regression/centroid_test_data", test=os.path.isdir) template = os.path.join(data_dir, "centroid_00*.cbf") file_names = glob.glob(template) sweep = ImageSetFactory.new(file_names)[0] to_xds = xds.to_xds(sweep) s1 = StringIO() to_xds.XDS_INP(out=s1) s2 = StringIO() real_space_a = (-5.327642, -39.034747, -4.988286) real_space_b = (-35.253495, 7.596265, -22.127661) real_space_c = (-22.673623, -1.486119, 35.793463) to_xds.xparm_xds(real_space_a, real_space_b, real_space_c, space_group=1, out=s2) # run coordinate frame converter on xparm.xds as a sanity check f = open_tmp_file(suffix="XPARM.XDS", mode="wb") s2.seek(0) f.writelines(s2.readlines()) f.close() from rstbx.cftbx import coordinate_frame_helpers converter = coordinate_frame_helpers.import_xds_xparm(f.name) scan = sweep.get_scan() detector = sweep.get_detector() goniometer = sweep.get_goniometer() beam = sweep.get_beam() assert approx_equal(real_space_a, converter.get_real_space_a()) assert approx_equal(real_space_b, converter.get_real_space_b()) assert approx_equal(real_space_c, converter.get_real_space_c()) assert approx_equal(goniometer.get_rotation_axis(), converter.get_rotation_axis()) assert approx_equal(beam.get_direction(), converter.get_sample_to_source().elems) assert approx_equal(detector[0].get_fast_axis(), converter.get_detector_fast()) assert approx_equal(detector[0].get_slow_axis(), converter.get_detector_slow()) assert approx_equal(detector[0].get_origin(), converter.get_detector_origin())
def exercise_pdb_input_error_handling():
  from libtbx.test_utils import open_tmp_file
  from libtbx.test_utils import Exception_expected
  bad_pdb_string = """\
ATOM 1 CB LYS 109 16.113 7.345 47.084 1.00 20.00 A
ATOM 2 CG LYS 109 17.058 6.315 47.703 1.00 20.00 A
ATOM 3 CB LYS 109 26.721 1.908 15.275 1.00 20.00 B
ATOM 4 CG LYS 109 27.664 2.793 16.091 1.00 20.00 B
"""
  f = open_tmp_file(suffix="bad.pdb")
  f.write(bad_pdb_string)
  f.close()
  try:
    pdb_inp = pdb.input(file_name=f.name, source_info=None)
  except ValueError, e:
    err_message = str(e)
    assert bad_pdb_string.splitlines()[0] in err_message
  else:
    raise Exception_expected
def exercise_to_xds(): if not libtbx.env.has_module("dials_regression"): print "Skipping exercise_to_xds(): dials_regression not present" return data_dir = libtbx.env.find_in_repositories( relative_path="dials_regression/centroid_test_data", test=os.path.isdir) template = os.path.join(data_dir, "centroid_00*.cbf") file_names = glob.glob(template) sweep = ImageSetFactory.new(file_names)[0] to_xds = xds.to_xds(sweep) s1 = StringIO() to_xds.XDS_INP(out=s1) s2 = StringIO() real_space_a = (-5.327642, -39.034747, -4.988286) real_space_b = (-35.253495, 7.596265, -22.127661) real_space_c = (-22.673623, -1.486119, 35.793463) to_xds.xparm_xds(real_space_a, real_space_b, real_space_c, space_group=1, out=s2) # run coordinate frame converter on xparm.xds as a sanity check f = open_tmp_file(suffix="XPARM.XDS", mode="wb") s2.seek(0) f.writelines(s2.readlines()) f.close() from rstbx.cftbx import coordinate_frame_helpers converter = coordinate_frame_helpers.import_xds_xparm(f.name) scan = sweep.get_scan() detector = sweep.get_detector() goniometer = sweep.get_goniometer() beam = sweep.get_beam() assert approx_equal(real_space_a, converter.get_real_space_a()) assert approx_equal(real_space_b, converter.get_real_space_b()) assert approx_equal(real_space_c, converter.get_real_space_c()) assert approx_equal(goniometer.get_rotation_axis(), converter.get_rotation_axis()) assert approx_equal( beam.get_direction(), converter.get_sample_to_source().elems) assert approx_equal(detector[0].get_fast_axis(), converter.get_detector_fast()) assert approx_equal(detector[0].get_slow_axis(), converter.get_detector_slow()) assert approx_equal(detector[0].get_origin(), converter.get_detector_origin())
def exercise_density_modification(): cci_structure_lib = os.environ.get("CCI_STRUCTURE_LIB") if cci_structure_lib is None: print "Skipping exercise_density_modification(): $CCI_STRUCTURE_LIB is not set" return rnase_s_path = os.path.join(cci_structure_lib, "rnase-s") from libtbx.test_utils import open_tmp_file tmp_file = open_tmp_file(suffix=".params") tmp_file.write(params % (rnase_s_path, rnase_s_path)) tmp_file.close() #dm = density_modification.run( #args=[tmp_file.name, "%s/model/1RGE.pdb"%rnase_s_path]) args = (tmp_file.name, op.join(rnase_s_path, "model", "1RGE.pdb")) for arg in args: assert arg.find('"') < 0 cmd = 'mmtbx.density_modification "%s" "%s"' % args print cmd result = easy_run.fully_buffered(command=cmd).raise_if_errors() assert result.stdout_lines[-5].startswith('Starting dm/model correlation:') assert result.stdout_lines[-4].startswith('Final dm/model correlation:') assert approx_equal(float(result.stdout_lines[-5].split()[-1]), 0.59, 0.01) assert approx_equal(float(result.stdout_lines[-4].split()[-1]), 0.80, 0.01)
def exercise_density_modification(): pdb_path = libtbx.env.find_in_repositories( relative_path="phenix_regression/density_modification/1RGE.pdb", test=os.path.isfile) reflection_path = libtbx.env.find_in_repositories( relative_path="phenix_regression/density_modification/scale.hkl", test=os.path.isfile) phases_path = libtbx.env.find_in_repositories( relative_path="phenix_regression/density_modification/ir_phase.hkl", test=os.path.isfile) if pdb_path is None or reflection_path is None or phases_path is None: print( "Skipping exercise_density_modification(): phenix_regression is not available" ) return from libtbx.test_utils import open_tmp_file tmp_file = open_tmp_file(suffix=".params") tmp_file.write(params % (reflection_path, phases_path)) tmp_file.close() #dm = density_modification.run( #args=[tmp_file.name, "%s/model/1RGE.pdb"%rnase_s_path]) args = (tmp_file.name, pdb_path) for arg in args: assert arg.find('"') < 0 cmd = 'mmtbx.density_modification "%s" "%s"' % args print(cmd) result = easy_run.fully_buffered(command=cmd).raise_if_errors() assert result.stdout_lines[-5].startswith('Starting dm/model correlation:') assert result.stdout_lines[-4].startswith('Final dm/model correlation:') assert approx_equal(float(result.stdout_lines[-5].split()[-1]), 0.59, 0.01) assert approx_equal(float(result.stdout_lines[-4].split()[-1]), 0.80, 0.01)
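# Sketch (editor's addition) of the command-driving pattern shared by the three
# density-modification tests above: refuse arguments containing double quotes,
# quote everything, and run the command through libtbx.easy_run, raising if
# anything lands on stderr.  The default command is only an illustrative
# stand-in; mmtbx.density_modification itself needs the regression data checked
# for above.
def _example_run_quoted_command(args=("libtbx.python", "-c", "print(42)")):
  from libtbx import easy_run
  for arg in args:
    assert arg.find('"') < 0
  cmd = " ".join(['"%s"' % arg for arg in args])
  print(cmd)
  result = easy_run.fully_buffered(command=cmd).raise_if_errors()
  return result.stdout_lines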
def exercise_angle_edits_change(mon_lib_srv, ener_lib): edits = """\ refinement.geometry_restraints.edits { n_2_selection = chain A and resname ALA and resid 2 and name N ca_2_selection = chain A and resname ALA and resid 2 and name CA c_2_selection = chain A and resname ALA and resid 2 and name C angle { action = *change atom_selection_1 = $n_2_selection atom_selection_2 = $ca_2_selection atom_selection_3 = $c_2_selection angle_ideal = 100.00 sigma = 5 } }""" gm_phil = iotbx.phil.parse( monomer_library.pdb_interpretation.grand_master_phil_str, process_includes=True) edits_phil = iotbx.phil.parse(edits) working_phil = gm_phil.fetch(edits_phil) params = working_phil.extract() # print params.geometry_restraints.edits.parallelity[0].atom_selection_1 assert params.geometry_restraints.edits.angle[0].atom_selection_1.find( "name N") processed_pdb_file = monomer_library.pdb_interpretation.process( mon_lib_srv=mon_lib_srv, ener_lib=ener_lib, file_name=None, raw_records=raw_records2, params=params.pdb_interpretation, log=None) grm = processed_pdb_file.geometry_restraints_manager( params_edits=params.geometry_restraints.edits, params_remove=params.geometry_restraints.remove) assert grm.angle_proxies.size() == 20 user_defined = grm.angle_proxies.proxy_select( origin_id=origin_ids.get_origin_id('edits')) assert user_defined.size() == 1 udp = user_defined[0] assert list(udp.i_seqs) == [5, 6, 7] assert approx_equal(udp.angle_ideal, 100, eps=1e-4) assert approx_equal(udp.weight, 0.04, eps=1e-4) from libtbx.test_utils import open_tmp_file from libtbx import easy_run pdb_file = open_tmp_file(suffix="aaa.pdb") pdb_file.write('\n'.join(raw_records2)) pdb_file.close() edits_file = open_tmp_file(suffix="tau.edits") edits_file.write(edits) edits_file.close() cmd = "phenix.pdb_interpretation \"%s\" \"%s\" write_geo_files=True" % ( pdb_file.name, edits_file.name) result = easy_run.fully_buffered(cmd).raise_if_errors() geo_file = open(pdb_file.name + '.geo', "r") # geo_file = open(pdb_file.name.replace(".pdb", '_minimized.geo'), "r") geo_file_str = geo_file.read() assert '''User supplied angle restraints: 1 Sorted by residual: angle pdb=" N ALA A 2 " segid="A " pdb=" CA ALA A 2 " segid="A " pdb=" C ALA A 2 " segid="A " ideal model delta sigma weight residual 100.00''' in geo_file_str
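# Sketch (editor's addition) of the PHIL plumbing used at the top of
# exercise_angle_edits_change: parse the grand master scope from
# mmtbx.monomer_library.pdb_interpretation, fetch a user edits string against
# it, and extract working parameters.  Assumes mmtbx is available; pass in an
# edits string such as the one defined in the test above.
def _example_fetch_geometry_edits(edits_str):
  import iotbx.phil
  import mmtbx.monomer_library.pdb_interpretation as pdb_interpretation
  gm_phil = iotbx.phil.parse(
    pdb_interpretation.grand_master_phil_str, process_includes=True)
  edits_phil = iotbx.phil.parse(edits_str)
  params = gm_phil.fetch(edits_phil).extract()
  # e.g. params.geometry_restraints.edits.angle[0].angle_ideal for the edit above
  return params.geometry_restraints.edits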
def exercise_to_xds(): if not libtbx.env.has_module("dials"): print "Skipping test: dials not present" return if not libtbx.env.has_module("dials_regression"): print "Skipping exercise_to_xds(): dials_regression not present" return data_dir = libtbx.env.find_in_repositories( relative_path="dials_regression/centroid_test_data", test=os.path.isdir) template = os.path.join(data_dir, "centroid_*.cbf") file_names = glob.glob(template) expected_output = """\ DETECTOR=PILATUS MINIMUM_VALID_PIXEL_VALUE=0 OVERLOAD=495976 SENSOR_THICKNESS= 0.320 DIRECTION_OF_DETECTOR_X-AXIS= 1.00000 0.00000 0.00000 DIRECTION_OF_DETECTOR_Y-AXIS= 0.00000 1.00000 0.00000 NX=2463 NY=2527 QX=0.1720 QY=0.1720 DETECTOR_DISTANCE= 190.180 ORGX= 1235.84 ORGY= 1279.58 ROTATION_AXIS= 1.00000 0.00000 0.00000 STARTING_ANGLE= 0.000 OSCILLATION_RANGE= 0.200 X-RAY_WAVELENGTH= 0.97950 INCIDENT_BEAM_DIRECTION= -0.000 -0.000 1.021 FRACTION_OF_POLARIZATION= 0.999 POLARIZATION_PLANE_NORMAL= 0.000 1.000 0.000 NAME_TEMPLATE_OF_DATA_FRAMES= %s TRUSTED_REGION= 0.0 1.41 UNTRUSTED_RECTANGLE= 487 495 0 2528 UNTRUSTED_RECTANGLE= 981 989 0 2528 UNTRUSTED_RECTANGLE= 1475 1483 0 2528 UNTRUSTED_RECTANGLE= 1969 1977 0 2528 UNTRUSTED_RECTANGLE= 0 2464 195 213 UNTRUSTED_RECTANGLE= 0 2464 407 425 UNTRUSTED_RECTANGLE= 0 2464 619 637 UNTRUSTED_RECTANGLE= 0 2464 831 849 UNTRUSTED_RECTANGLE= 0 2464 1043 1061 UNTRUSTED_RECTANGLE= 0 2464 1255 1273 UNTRUSTED_RECTANGLE= 0 2464 1467 1485 UNTRUSTED_RECTANGLE= 0 2464 1679 1697 UNTRUSTED_RECTANGLE= 0 2464 1891 1909 UNTRUSTED_RECTANGLE= 0 2464 2103 2121 UNTRUSTED_RECTANGLE= 0 2464 2315 2333 DATA_RANGE= 1 9 JOB=XYCORR INIT COLSPOT IDXREF DEFPIX INTEGRATE CORRECT\ """ % (template.replace("*", "????")) cmd = " ".join(["dxtbx.to_xds"] + file_names) result = easy_run.fully_buffered(cmd) # allow extra lines to have been added (these may be comments) for record in expected_output.split('\n'): assert record.strip() in "\n".join(result.stdout_lines), record # now test reading from a json file sweep = ImageSetFactory.new(file_names)[0] f = open_tmp_file(suffix="sweep.json", mode="wb") dump.imageset(sweep, f) f.close() cmd = " ".join(["dxtbx.to_xds", f.name]) print cmd result = easy_run.fully_buffered(cmd) # allow extra lines to have been added (these may be comments) for record in expected_output.split('\n'): assert record.strip() in "\n".join(result.stdout_lines), record
def exercise_lex_parse_build(): exercise_parser(cif.reader, cif.builders.cif_model_builder) cm = cif.reader(input_string=cif_quoted_string).model() assert cm['global']['_a'] == 'a"b' assert cm['global']['_b'] == "a dog's life" stdout = sys.stdout s = StringIO() sys.stdout = s try: cif.reader(input_string=cif_invalid_missing_value) except CifParserError: pass else: raise Exception_expected r = cif.reader(input_string=cif_invalid_missing_value, raise_if_errors=False) assert r.error_count() == 1 try: cif.reader(input_string=cif_invalid_string) except CifParserError: pass else: raise Exception_expected a = cif.reader(input_string=cif_cod) assert a.error_count() == 0 try: cif.reader(input_string=cif_invalid_semicolon_text_field) except CifParserError: pass else: raise Exception_expected d = cif.reader(input_string=cif_valid_semicolon_text_field) assert d.error_count() == 0 assert d.model()['1']['_a'] == '\n1\n' e = cif.reader(input_string=cif_unquoted_string_semicolon) assert not show_diff( str(e.model()), """\ data_1 _a ;1 _b ; _c 2 """) cif_str_1 = """\ data_1 _a 1 """ cif_str_2 = """\ data_2 _b 2 """ cm = cif.reader(input_string=cif_str_1).model() assert list(cm.keys()) == ['1'] cif.reader(input_string=cif_str_2, cif_object=cm).model() assert list(cm.keys()) == ['1', '2'] try: cm = cif.reader(input_string=cif_invalid_loop).model() except CifParserError: pass else: raise Exception_expected try: cm = cif.reader(input_string=cif_invalid_loop_2).model() except CifParserError: pass else: raise Exception_expected sys.stdout = stdout arrays = miller.array.from_cif(file_object=StringIO( cif_miller_array_template % ('_refln_F_calc', '_refln_F_meas', '_refln_F_sigma')), data_block_name='global') assert sorted(arrays.keys()) == ['_refln_F_calc', '_refln_F_meas'] assert arrays['_refln_F_calc'].sigmas() is None assert isinstance(arrays['_refln_F_meas'].sigmas(), flex.double) arrays = miller.array.from_cif(file_object=StringIO( cif_miller_array_template % ('_refln_A_calc', '_refln_B_calc', '_refln_F_meas')), data_block_name='global') assert sorted(arrays.keys()) == ['_refln_A_calc', '_refln_F_meas'] assert arrays['_refln_A_calc'].is_complex_array() arrays = miller.array.from_cif(file_object=StringIO( cif_miller_array_template % ('_refln_A_meas', '_refln_B_meas', '_refln_F_meas')), data_block_name='global') assert sorted(arrays.keys()) == ['_refln_A_meas', '_refln_F_meas'] assert arrays['_refln_A_meas'].is_complex_array() arrays = miller.array.from_cif( file_object=StringIO(cif_miller_array_template % ('_refln_intensity_calc', '_refln_intensity_meas', '_refln_intensity_sigma')), data_block_name='global') assert sorted( arrays.keys()) == ['_refln_intensity_calc', '_refln_intensity_meas'] arrays = miller.array.from_cif(file_object=StringIO( cif_miller_array_template % ('_refln_F_calc', '_refln_phase_calc', '_refln_F_sigma')), data_block_name='global') assert sorted(arrays.keys()) == ['_refln_F_calc'] assert arrays['_refln_F_calc'].is_complex_array() for data_block_name in (None, "global"): miller_arrays = cif.reader( file_object=StringIO(cif_miller_array_template % ('_refln_F_calc', '_refln_F_meas', '_refln_F_sigma'))).as_miller_arrays( data_block_name=data_block_name) assert " ".join(sorted([str(ma.info()) for ma in miller_arrays])) \ == "cif:global,_refln_F_calc cif:global,_refln_F_meas,_refln_F_sigma" f = open_tmp_file(suffix="cif") f.write(cif_miller_array_template % ('_refln_F_calc', '_refln_F_meas', '_refln_F_sigma')) f.close() miller_arrays = any_reflection_file(file_name=f.name).as_miller_arrays() 
  assert len(miller_arrays) == 2
  cs = crystal.symmetry(space_group_info=sgtbx.space_group_info("P1"))
  miller_arrays = any_reflection_file(file_name=f.name).as_miller_arrays(
    crystal_symmetry=cs, force_symmetry=True, anomalous=True)
  assert miller_arrays[0].anomalous_flag() is True
  assert miller_arrays[0].crystal_symmetry().space_group() == cs.space_group()
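# Sketch (editor's addition): the two routes used above for turning a reflection
# CIF into miller arrays.  "reflections.cif" is a hypothetical placeholder for
# any reflection CIF; any_reflection_file is assumed to come from
# iotbx.reflection_file_reader (the import is not shown above).
def _example_cif_to_miller_arrays(file_name="reflections.cif"):
  import iotbx.cif
  from iotbx.reflection_file_reader import any_reflection_file
  # low-level CIF reader, optionally restricted to one data block
  arrays_1 = iotbx.cif.reader(
    file_object=open(file_name)).as_miller_arrays(data_block_name=None)
  # format-neutral reflection-file reader
  arrays_2 = any_reflection_file(file_name=file_name).as_miller_arrays()
  for ma in arrays_2:
    print(ma.info())
  return arrays_1, arrays_2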
def exercise_extract_authors():
  pdb_in = """
AUTHOR R.B.SUTTON,J.A.ERNST,A.T.BRUNGER
"""
  assert iotbx.pdb.input(source_info=None, lines=pdb_in).extract_authors() == \
    ['B.R.SUTTON', 'A.ERNST.J', 'A.BRUNGER.T']
def exercise_mmcif_support(): from libtbx.test_utils import open_tmp_file f = open_tmp_file(suffix="pdbtools.cif") f.write("""\ data_phenix _space_group.name_H-M_alt 'C 1 2 1' _space_group.name_Hall ' C 2y' _cell.length_a 46.053 _cell.length_b 9.561 _cell.length_c 20.871 _cell.angle_alpha 90.0 _cell.angle_beta 97.43 _cell.angle_gamma 90.0 _cell.volume 9112.60599144 loop_ _atom_site.group_PDB _atom_site.id _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.auth_asym_id _atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.B_iso_or_equiv _atom_site.type_symbol _atom_site.pdbx_formal_charge _atom_site.label_asym_id _atom_site.label_entity_id _atom_site.label_seq_id _atom_site.pdbx_PDB_model_num ATOM 2 CA . LYS A 1 ? 7.49733 -0.62028 4.35289 1.000 10.25989 C ? A ? 1 1 ATOM 11 CA . LEU A 2 ? 3.72032 -0.19320 3.89326 1.000 7.80433 C ? A ? 2 1 ATOM 19 CA . VAL A 3 ? 0.78668 -0.39555 6.35234 1.000 5.03864 C ? A ? 3 1 ATOM 26 CA . PHE A 4 ? -2.75438 -0.21383 5.02429 1.000 8.93080 C ? A ? 4 1 ATOM 37 CA . PHE A 5 ? -6.05155 -0.46197 6.85390 1.000 9.57417 C ? A ? 5 1 ATOM 48 CA . ALA A 6 ? -9.57646 -0.10942 5.55847 1.000 17.73488 C ? A ? 6 1 ATOM 54 CA . LYS B 1 ? -8.86604 -5.20044 5.46515 1.000 16.15297 C ? B ? 7 1 """) f.close() cmd = " ".join(["phenix.pdbtools", "\"%s\"" % f.name, "rename_chain_id.old_id=A", "rename_chain_id.new_id=C"]) run_command(command=cmd, verbose=False) assert os.path.isfile(f.name+"_modified.pdb") pdb_inp = iotbx.pdb.input(file_name=f.name+"_modified.pdb") assert pdb_inp.file_type() == "pdb" hierarchy = pdb_inp.construct_hierarchy() assert [chain.id for chain in hierarchy.chains()] == ['C', 'B'] cmd = " ".join(["phenix.pdbtools", "\"%s\"" % f.name, "adp.convert_to_anisotropic=True", "output.format=mmcif"]) run_command(command=cmd, verbose=False) assert os.path.isfile(f.name+"_modified.cif") pdb_inp = iotbx.pdb.input(file_name=f.name+"_modified.cif") assert pdb_inp.file_type() == "mmcif" xs = pdb_inp.xray_structure_simple() assert xs.use_u_aniso().all_eq(True)
def exercise_mmcif_support(): from libtbx.test_utils import open_tmp_file f = open_tmp_file(suffix="pdbtools.cif") f.write("""\ data_phenix _space_group.name_H-M_alt 'C 1 2 1' _space_group.name_Hall ' C 2y' _cell.length_a 46.053 _cell.length_b 9.561 _cell.length_c 20.871 _cell.angle_alpha 90.0 _cell.angle_beta 97.43 _cell.angle_gamma 90.0 _cell.volume 9112.60599144 loop_ _atom_site.group_PDB _atom_site.id _atom_site.label_atom_id _atom_site.label_alt_id _atom_site.label_comp_id _atom_site.auth_asym_id _atom_site.auth_seq_id _atom_site.pdbx_PDB_ins_code _atom_site.Cartn_x _atom_site.Cartn_y _atom_site.Cartn_z _atom_site.occupancy _atom_site.B_iso_or_equiv _atom_site.type_symbol _atom_site.pdbx_formal_charge _atom_site.label_asym_id _atom_site.label_entity_id _atom_site.label_seq_id _atom_site.pdbx_PDB_model_num ATOM 2 CA . LYS A 1 ? 7.49733 -0.62028 4.35289 1.000 10.25989 C ? A ? 1 1 ATOM 11 CA . LEU A 2 ? 3.72032 -0.19320 3.89326 1.000 7.80433 C ? A ? 2 1 ATOM 19 CA . VAL A 3 ? 0.78668 -0.39555 6.35234 1.000 5.03864 C ? A ? 3 1 ATOM 26 CA . PHE A 4 ? -2.75438 -0.21383 5.02429 1.000 8.93080 C ? A ? 4 1 ATOM 37 CA . PHE A 5 ? -6.05155 -0.46197 6.85390 1.000 9.57417 C ? A ? 5 1 ATOM 48 CA . ALA A 6 ? -9.57646 -0.10942 5.55847 1.000 17.73488 C ? A ? 6 1 ATOM 54 CA . LYS B 1 ? -8.86604 -5.20044 5.46515 1.000 16.15297 C ? B ? 7 1 """) f.close() cmd = " ".join([ "phenix.pdbtools", "\"%s\"" % f.name, "rename_chain_id.old_id=A", "rename_chain_id.new_id=C" ]) run_command(command=cmd, verbose=False) assert os.path.isfile(f.name + "_modified.pdb") pdb_inp = iotbx.pdb.input(file_name=f.name + "_modified.pdb") assert pdb_inp.file_type() == "pdb" hierarchy = pdb_inp.construct_hierarchy() assert [chain.id for chain in hierarchy.chains()] == ['C', 'B'] cmd = " ".join([ "phenix.pdbtools", "\"%s\"" % f.name, "adp.convert_to_anisotropic=True", "output.format=mmcif" ]) run_command(command=cmd, verbose=False) assert os.path.isfile(f.name + "_modified.cif") pdb_inp = iotbx.pdb.input(file_name=f.name + "_modified.cif") assert pdb_inp.file_type() == "mmcif" xs = pdb_inp.xray_structure_simple() assert xs.use_u_aniso().all_eq(True)
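# Sketch (editor's addition) of the phenix.pdbtools command pattern in
# exercise_mmcif_support: quote the input path, pass phil-style assignments, and
# pick up the "<input>_modified.pdb" result.  "model.cif" is a hypothetical
# input file; run_command is assumed to be libtbx.test_utils.run_command, as
# used (but not imported) in the tests above.
def _example_pdbtools_rename_chain(file_name="model.cif"):
  import os
  import iotbx.pdb
  from libtbx.test_utils import run_command
  cmd = " ".join([
    "phenix.pdbtools",
    '"%s"' % file_name,
    "rename_chain_id.old_id=A",
    "rename_chain_id.new_id=C"])
  run_command(command=cmd, verbose=False)
  output = file_name + "_modified.pdb"
  assert os.path.isfile(output)
  return iotbx.pdb.input(file_name=output).construct_hierarchy()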
def exercise_to_xds(): if not libtbx.env.has_module("dials_regression"): print "Skipping exercise_to_xds(): dials_regression not present" return data_dir = libtbx.env.find_in_repositories( relative_path="dials_regression/centroid_test_data", test=os.path.isdir) template = os.path.join(data_dir, "centroid_*.cbf") file_names = glob.glob(template) expected_output = """\ DETECTOR=PILATUS MINIMUM_VALID_PIXEL_VALUE=0 OVERLOAD=495976 SENSOR_THICKNESS= 0.320 DIRECTION_OF_DETECTOR_X-AXIS= 1.00000 0.00000 0.00000 DIRECTION_OF_DETECTOR_Y-AXIS= 0.00000 1.00000 0.00000 NX=2463 NY=2527 QX=0.1720 QY=0.1720 DETECTOR_DISTANCE= 190.180 ORGX= 1235.84 ORGY= 1279.58 ROTATION_AXIS= 1.00000 0.00000 0.00000 STARTING_ANGLE= 0.000 OSCILLATION_RANGE= 0.200 X-RAY_WAVELENGTH= 0.97950 INCIDENT_BEAM_DIRECTION= -0.000 -0.000 1.021 FRACTION_OF_POLARIZATION= 0.999 POLARIZATION_PLANE_NORMAL= 0.000 1.000 0.000 NAME_TEMPLATE_OF_DATA_FRAMES= %s TRUSTED_REGION= 0.0 1.41 UNTRUSTED_RECTANGLE= 487 495 0 2528 UNTRUSTED_RECTANGLE= 981 989 0 2528 UNTRUSTED_RECTANGLE= 1475 1483 0 2528 UNTRUSTED_RECTANGLE= 1969 1977 0 2528 UNTRUSTED_RECTANGLE= 0 2464 195 213 UNTRUSTED_RECTANGLE= 0 2464 407 425 UNTRUSTED_RECTANGLE= 0 2464 619 637 UNTRUSTED_RECTANGLE= 0 2464 831 849 UNTRUSTED_RECTANGLE= 0 2464 1043 1061 UNTRUSTED_RECTANGLE= 0 2464 1255 1273 UNTRUSTED_RECTANGLE= 0 2464 1467 1485 UNTRUSTED_RECTANGLE= 0 2464 1679 1697 UNTRUSTED_RECTANGLE= 0 2464 1891 1909 UNTRUSTED_RECTANGLE= 0 2464 2103 2121 UNTRUSTED_RECTANGLE= 0 2464 2315 2333 DATA_RANGE= 1 9 JOB=XYCORR INIT COLSPOT IDXREF DEFPIX INTEGRATE CORRECT\ """ %(template.replace("*", "????")) cmd = " ".join(["dxtbx.to_xds"] + file_names) result = easy_run.fully_buffered(cmd) # allow extra lines to have been added (these may be comments) for record in expected_output.split('\n'): assert record.strip() in "\n".join(result.stdout_lines), record # now test reading from a json file sweep = ImageSetFactory.new(file_names)[0] f = open_tmp_file(suffix="sweep.json", mode="wb") dump.imageset(sweep, f) f.close() cmd = " ".join(["dxtbx.to_xds", f.name]) result = easy_run.fully_buffered(cmd) # allow extra lines to have been added (these may be comments) for record in expected_output.split('\n'): assert record.strip() in "\n".join(result.stdout_lines), record
def run(self): from iotbx.xds import xparm import os import libtbx.load_env from libtbx.test_utils import open_tmp_file iotbx_dir = libtbx.env.dist_path('iotbx') filename = os.path.join(iotbx_dir, 'xds', 'tests', 'XPARM.XDS') handle = xparm.reader() assert handle.find_version(filename) == 1 handle.read_file(filename) print 'OK' filename = os.path.join(iotbx_dir, 'xds', 'tests', 'NEW_XPARM.XDS') handle = xparm.reader() assert handle.find_version(filename) == 2 handle.read_file(filename) print 'OK' f = open_tmp_file(suffix='XPARM.XDS', mode='wb') f.close() writer = xparm.writer( handle.starting_frame, handle.starting_angle, handle.oscillation_range, handle.rotation_axis, handle.wavelength, handle.beam_vector, handle.space_group, handle.unit_cell, handle.unit_cell_a_axis, handle.unit_cell_b_axis, handle.unit_cell_c_axis, handle.num_segments, handle.detector_size, handle.pixel_size, handle.detector_origin, handle.detector_distance, handle.detector_x_axis, handle.detector_y_axis, handle.detector_normal, handle.segments, handle.orientation) writer.write_file(f.name) handle_recycled = xparm.reader() # make sure we wrote out version 2 assert handle_recycled.find_version(f.name) == 2 handle_recycled.read_file(f.name) for handle in (handle, handle_recycled): # Scan and goniometer stuff assert handle.starting_frame == 1 assert handle.starting_angle == 82.0 assert handle.oscillation_range == 0.1500 assert handle.rotation_axis == (0.999997, -0.001590, -0.001580) # Beam stuff assert handle.wavelength == 0.976250 assert handle.beam_vector == (0.001608, 0.004392, 1.024317) # Detector stuff assert handle.detector_size == (2463, 2527) assert handle.pixel_size == (0.172, 0.172) assert handle.detector_distance == 264.928955 assert handle.detector_origin == (1224.856812, 1187.870972) assert handle.detector_x_axis == (1.0, 0.0, 0.0) assert handle.detector_y_axis == (0.0, 1.0, 0.0) assert handle.detector_normal == (0.0, 0.0, 1.0) # Crystal stuff assert handle.space_group == 75 assert handle.unit_cell == (57.7831, 57.7831, 150.0135, 90.000, 90.000, 90.000) assert handle.unit_cell_a_axis == (-14.918090, -22.358297, 51.151196) assert handle.unit_cell_b_axis == (-19.858326, 51.608330, 16.766487) assert handle.unit_cell_c_axis == (-135.447952, -34.400188, -54.539391) # segment stuff assert handle_recycled.num_segments == 1 assert handle_recycled.segments == [(1, 1, 2463, 1, 2527)] assert handle_recycled.orientation == [ (0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0)] print 'OK'
def exercise_lex_parse_build(): exercise_parser(cif.reader, cif.builders.cif_model_builder) cm = cif.reader(input_string=cif_quoted_string).model() assert cm['global']['_a'] == 'a"b' assert cm['global']['_b'] == "a dog's life" stdout = sys.stdout s = StringIO() sys.stdout = s try: cif.reader(input_string=cif_invalid_missing_value) except CifParserError: pass else: raise Exception_expected r = cif.reader( input_string=cif_invalid_missing_value, raise_if_errors=False) assert r.error_count() == 1 try: cif.reader(input_string=cif_invalid_string) except CifParserError: pass else: raise Exception_expected a = cif.reader(input_string=cif_cod) assert a.error_count() == 0 try: cif.reader(input_string=cif_invalid_semicolon_text_field) except CifParserError: pass else: raise Exception_expected d = cif.reader(input_string=cif_valid_semicolon_text_field) assert d.error_count() == 0 assert d.model()['1']['_a'] == '\n1\n' e = cif.reader(input_string=cif_unquoted_string_semicolon) assert not show_diff(str(e.model()), """\ data_1 _a ;1 _b ; _c 2 """) cif_str_1 = """\ data_1 _a 1 """ cif_str_2 = """\ data_2 _b 2 """ cm = cif.reader(input_string=cif_str_1).model() assert cm.keys() == ['1'] cif.reader(input_string=cif_str_2, cif_object=cm).model() assert cm.keys() == ['1', '2'] try: cm = cif.reader(input_string=cif_invalid_loop).model() except CifParserError: pass else: raise Exception_expected try: cm = cif.reader(input_string=cif_invalid_loop_2).model() except CifParserError: pass else: raise Exception_expected sys.stdout = stdout arrays = miller.array.from_cif(file_object=StringIO( cif_miller_array_template %( '_refln_F_calc', '_refln_F_meas', '_refln_F_sigma')), data_block_name='global') assert sorted(arrays.keys()) == ['_refln_F_calc', '_refln_F_meas'] assert arrays['_refln_F_calc'].sigmas() is None assert isinstance(arrays['_refln_F_meas'].sigmas(), flex.double) arrays = miller.array.from_cif(file_object=StringIO( cif_miller_array_template %( '_refln_A_calc', '_refln_B_calc', '_refln_F_meas')), data_block_name='global') assert sorted(arrays.keys()) == ['_refln_A_calc', '_refln_F_meas'] assert arrays['_refln_A_calc'].is_complex_array() arrays = miller.array.from_cif(file_object=StringIO( cif_miller_array_template %( '_refln_A_meas', '_refln_B_meas', '_refln_F_meas')), data_block_name='global') assert sorted(arrays.keys()) == ['_refln_A_meas', '_refln_F_meas'] assert arrays['_refln_A_meas'].is_complex_array() arrays = miller.array.from_cif(file_object=StringIO( cif_miller_array_template %( '_refln_intensity_calc', '_refln_intensity_meas', '_refln_intensity_sigma')), data_block_name='global') assert sorted(arrays.keys()) == [ '_refln_intensity_calc', '_refln_intensity_meas'] arrays = miller.array.from_cif(file_object=StringIO( cif_miller_array_template %( '_refln_F_calc', '_refln_phase_calc', '_refln_F_sigma')), data_block_name='global') assert sorted(arrays.keys()) == ['_refln_F_calc'] assert arrays['_refln_F_calc'].is_complex_array() for data_block_name in (None, "global"): miller_arrays = cif.reader(file_object=StringIO( cif_miller_array_template %( '_refln_F_calc', '_refln_F_meas', '_refln_F_sigma'))).as_miller_arrays(data_block_name=data_block_name) assert " ".join(sorted([str(ma.info()) for ma in miller_arrays])) \ == "cif:global,_refln_F_calc cif:global,_refln_F_meas,_refln_F_sigma" f = open_tmp_file(suffix="cif") f.write(cif_miller_array_template %( '_refln_F_calc', '_refln_F_meas', '_refln_F_sigma')) f.close() miller_arrays = any_reflection_file(file_name=f.name).as_miller_arrays() assert 
len(miller_arrays) == 2
def run(self): from iotbx.xds import xparm import os import libtbx.load_env from libtbx.test_utils import open_tmp_file iotbx_dir = libtbx.env.dist_path('iotbx') filename = os.path.join(iotbx_dir, 'xds', 'tests', 'XPARM.XDS') handle = xparm.reader() assert handle.find_version(filename) == 1 handle.read_file(filename) print('OK') filename = os.path.join(iotbx_dir, 'xds', 'tests', 'NEW_XPARM.XDS') handle = xparm.reader() assert handle.find_version(filename) == 2 handle.read_file(filename) print('OK') f = open_tmp_file(suffix='XPARM.XDS', mode='wb') f.close() writer = xparm.writer( handle.starting_frame, handle.starting_angle, handle.oscillation_range, handle.rotation_axis, handle.wavelength, handle.beam_vector, handle.space_group, handle.unit_cell, handle.unit_cell_a_axis, handle.unit_cell_b_axis, handle.unit_cell_c_axis, handle.num_segments, handle.detector_size, handle.pixel_size, handle.detector_origin, handle.detector_distance, handle.detector_x_axis, handle.detector_y_axis, handle.detector_normal, handle.segments, handle.orientation) writer.write_file(f.name) handle_recycled = xparm.reader() # make sure we wrote out version 2 assert handle_recycled.find_version(f.name) == 2 handle_recycled.read_file(f.name) for handle in (handle, handle_recycled): # Scan and goniometer stuff assert handle.starting_frame == 1 assert handle.starting_angle == 82.0 assert handle.oscillation_range == 0.1500 assert handle.rotation_axis == (0.999997, -0.001590, -0.001580) # Beam stuff assert handle.wavelength == 0.976250 assert handle.beam_vector == (0.001608, 0.004392, 1.024317) # Detector stuff assert handle.detector_size == (2463, 2527) assert handle.pixel_size == (0.172, 0.172) assert handle.detector_distance == 264.928955 assert handle.detector_origin == (1224.856812, 1187.870972) assert handle.detector_x_axis == (1.0, 0.0, 0.0) assert handle.detector_y_axis == (0.0, 1.0, 0.0) assert handle.detector_normal == (0.0, 0.0, 1.0) # Crystal stuff assert handle.space_group == 75 assert handle.unit_cell == (57.7831, 57.7831, 150.0135, 90.000, 90.000, 90.000) assert handle.unit_cell_a_axis == (-14.918090, -22.358297, 51.151196) assert handle.unit_cell_b_axis == (-19.858326, 51.608330, 16.766487) assert handle.unit_cell_c_axis == (-135.447952, -34.400188, -54.539391) # segment stuff assert handle_recycled.num_segments == 1 assert handle_recycled.segments == [(1, 1, 2463, 1, 2527)] assert handle_recycled.orientation == [ (0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0)] print('OK')
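# Sketch (editor's addition): reading the bundled XPARM.XDS test file with
# iotbx.xds.xparm, as in run() above, and reporting a few of the parsed values.
# Assumes a cctbx source layout in which libtbx.env.dist_path('iotbx') resolves.
def _example_read_xparm():
  import os
  import libtbx.load_env
  from iotbx.xds import xparm
  filename = os.path.join(
    libtbx.env.dist_path('iotbx'), 'xds', 'tests', 'XPARM.XDS')
  handle = xparm.reader()
  version = handle.find_version(filename)
  handle.read_file(filename)
  print('XPARM version:', version)
  print('space group:', handle.space_group)
  print('unit cell:', handle.unit_cell)
  print('wavelength:', handle.wavelength)
  return handle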