def test(tmpdir):
    """Exercise dev.dials.merge_reflection_lists in 'update' and 'extend' modes."""
    tmpdir.chdir()
    from dials.array_family import flex

    table = flex.reflection_table()
    table['hkl'] = flex.miller_index(360)
    table['id'] = flex.int(360)
    table['intensity.sum.value'] = flex.double(360)
    table.as_pickle("temp1.pickle")
    table.as_pickle("temp2.pickle")

    # 'update' keeps the row count of one list; 'extend' concatenates both.
    for method, expected_rows in (('update', 360), ('extend', 720)):
        result = procrunner.run_process([
            'dev.dials.merge_reflection_lists',
            'temp1.pickle',
            'temp2.pickle',
            'method=%s' % method,
        ])
        assert result['exitcode'] == 0
        assert result['stderr'] == ''
        merged = flex.reflection_table.from_pickle('merged.pickle')
        assert len(merged) == expected_rows
def test_thing_2(tmpdir, xia2_regression_build):
    """Run dials.search_beam_position on the i04 bag-training dataset and
    check the optimized geometry still indexes to the expected cell."""
    data_dir = os.path.join(xia2_regression_build, "test_data", "i04_bag_training")
    g = glob.glob(os.path.join(data_dir, "*.cbf*"))
    if not g:
        pytest.skip(
            'xia2_regression files not downloaded. Run xia2_regression.fetch_test_data first'
        )
    tmpdir.chdir()

    # beam centre from image headers: 205.28,210.76 mm
    # deliberately import with a slightly wrong beam centre for the search
    args = ["dials.import", "mosflm_beam_centre=207,212"] + g
    print(args)
    result = procrunner.run_process(args)
    assert result['stderr'] == '' and result['exitcode'] == 0
    assert os.path.exists('datablock.json')

    # spot-finding, just need a subset of the data
    args = [
        "dials.find_spots", "datablock.json", "scan_range=1,10",
        "scan_range=531,540"
    ]
    print(args)
    result = procrunner.run_process(args)
    assert result['stderr'] == '' and result['exitcode'] == 0
    assert os.path.exists('strong.pickle')

    # actually run the beam centre search
    args = ["dials.search_beam_position", "datablock.json", "strong.pickle"]
    print(args)
    result = procrunner.run_process(args)
    assert result['stderr'] == '' and result['exitcode'] == 0
    assert os.path.exists('optimized_datablock.json')

    # look at the results: how far did the detector origin move?
    from dxtbx.serialize import load
    datablocks = load.datablock("datablock.json", check_format=False)
    original_imageset = datablocks[0].extract_imagesets()[0]
    optimized_datablock = load.datablock('optimized_datablock.json',
                                         check_format=False)
    detector_1 = original_imageset.get_detector()
    detector_2 = optimized_datablock[0].unique_detectors()[0]
    shift = (scitbx.matrix.col(detector_1[0].get_origin()) -
             scitbx.matrix.col(detector_2[0].get_origin()))
    print(shift)

    # check we can actually index the resulting optimized datablock
    from cctbx import uctbx
    from dials.test.algorithms.indexing.test_index import run_one_indexing
    expected_unit_cell = uctbx.unit_cell(
        (57.780, 57.800, 150.017, 89.991, 89.990, 90.007))
    expected_rmsds = (0.06, 0.05, 0.001)
    expected_hall_symbol = ' P 1'
    run_one_indexing((tmpdir / 'strong.pickle').strpath,
                     (tmpdir / 'optimized_datablock.json').strpath, [],
                     expected_unit_cell, expected_rmsds,
                     expected_hall_symbol)
def install_git(**kwargs):
    """Clone a git repository (recursively) into kwargs['location'].

    Expects kwargs['package'] (name used to look up a local reference
    repository), kwargs['source'] (clone URL) and kwargs['location']
    (destination directory).  Returns True on success, False on failure —
    including when git itself is not installed.
    """
    reference = []
    if os.name == 'posix':
        # On DLS posix hosts a local reference repository can speed up the
        # clone considerably; use it when present.
        reference_repository_path = os.path.join(
            '/dls/science/groups/scisoft/DIALS/repositories/git-reference',
            kwargs['package'])
        if os.path.isdir(reference_repository_path):
            reference = ['--reference', reference_repository_path]
            print("using reference repository...", end="")
            sys.stdout.flush()
    try:
        result = procrunner.run_process(
            ['git', 'clone', '--recursive', kwargs['source'],
             kwargs['location']] + reference,
            print_stdout=False)
        if result['exitcode']:
            return False
        if reference:
            # Sever the dependency on the reference repository: repack so all
            # objects are held locally, then drop the 'alternates' pointer.
            oldcwd = os.getcwd()
            os.chdir(kwargs['location'])
            result = procrunner.run_process(['git', 'repack', '-a', '-d'],
                                            print_stderr=True)
            os.chdir(oldcwd)
            assert result['exitcode'] == 0, \
                "Repack operation failed. Delete repository and try again."
            os.remove(os.path.join(kwargs['location'], '.git', 'objects',
                                   'info', 'alternates'))
        Toolbox.set_git_repository_config_to_rebase(
            os.path.join(kwargs['location'], '.git', 'config'))
        return True
    except OSError:
        # git may not be installed
        if os.path.isdir(kwargs['location']):
            shutil.rmtree(kwargs['location'])
        return False
def test(dials_regression, tmpdir):
    """Find spots with shoeboxes, then locate hot pixels in the same data."""
    tmpdir.chdir()
    cbf_files = glob(
        os.path.join(dials_regression, "centroid_test_data", "centroid*.cbf"))

    result = procrunner.run_process([
        "dials.find_spots",
        "output.datablock=datablock.json",
        "output.reflections=spotfinder.pickle",
        "output.shoeboxes=True",
    ] + cbf_files)
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    for produced in ("datablock.json", "spotfinder.pickle"):
        assert os.path.exists(produced)

    result = procrunner.run_process([
        "dials.find_hot_pixels",
        "input.datablock=datablock.json",
        "input.reflections=spotfinder.pickle",
        "output.mask=hot_mask.pickle"
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists("hot_mask.pickle")
    assert "Found 8 hot pixels" in result['stdout']
def test_output_rubbish(dials_regression, tmpdir):
    """Index centroid test data, integrate without profile fitting, and check
    that reflections with poor reference profiles are flagged.

    Bug fix: ``table.get_flags(...)`` returns a flex.bool array, so the old
    ``> 0`` comparison did not count flagged reflections; the check now uses
    ``.count(True)``.
    """
    tmpdir.chdir()
    result = procrunner.run_process([
        'dials.index',
        os.path.join(dials_regression, "centroid_test_data", 'datablock.json'),
        os.path.join(dials_regression, "centroid_test_data", 'strong.pickle'),
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists('experiments.json')
    assert os.path.exists('indexed.pickle')

    # Call dials.integrate
    result = procrunner.run_process([
        'dials.integrate',
        'experiments.json',
        'indexed.pickle',
        'profile.fitting=False',
        'prediction.padding=0',
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists('integrated.pickle')

    with open('integrated.pickle', 'rb') as fh:
        table = pickle.load(fh)

    # at least one reflection must be marked as having a bad reference
    assert table.get_flags(table.flags.bad_reference).count(True) > 0
    assert 'id' in table
    for row in table:
        assert row['id'] == 0
def test_spots_xds(tmpdir):
    """Round-trip a SPOT.XDS file through dials.import_xds and dials.export.

    Bug fix for Python 3: SPOT.XDS is plain text, so it is now written in
    text mode (``"wb"`` + a str fails on Python 3) and the exported file is
    read back in text mode (``bytes.split('\\n')`` with a str separator also
    fails on Python 3).
    """
    tmpdir.chdir()
    xds_input = 'SPOT.XDS'
    output_pickle = "spot.pickle"

    with open(xds_input, "w") as f:
        f.write("""\
 2411.40 1000.70 25.00 16384. 0 0 0
 1328.60 2170.40 20.57 7326. 0 0 0
 177.56 2191.30 24.94 6779. 0 0 0
 1231.34 1220.04 24.99 1952. 0 0 0
 1227.07 1230.56 24.81 349. 0 0 0
 1341.63 1243.25 5.64 321. 2 -2 11
 1125.23 1197.72 12.14 231. -1 2 -10
 1317.52 1171.59 19.28 120. 6 -4 6
 1260.25 1300.55 13.67 116. -4 2 6
 1090.27 1199.47 41.49 114. -2 3 -13
""")

    result = procrunner.run_process([
        "dials.import_xds",
        xds_input,  # xparm_file
        "input.method=reflections",
        "output.filename=" + output_pickle,
        "remove_invalid=True",
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists(output_pickle)

    import six.moves.cPickle as pickle
    with open(output_pickle, "rb") as f:
        reflections = pickle.load(f)
    # only the five spots with non-zero Miller indices survive remove_invalid
    assert len(reflections) == 5

    os.remove(xds_input)
    assert not os.path.exists(xds_input)

    # now test we can export it again
    result = procrunner.run_process([
        "dials.export",
        "format=xds",
        output_pickle,
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists(os.path.join("xds", "SPOT.XDS"))

    with open(os.path.join("xds", "SPOT.XDS"), "r") as f:
        txt = f.read()
    assert [line.strip() for line in txt.split('\n')] == \
        [line.strip() for line in """\
 1341.63 1243.25 5.64 321.00 2 -2 11
 1125.23 1197.72 12.14 231.00 -1 2 -10
 1317.52 1171.59 19.28 120.00 6 -4 6
 1260.25 1300.55 13.67 116.00 -4 2 6
 1090.27 1199.47 41.49 114.00 -2 3 -13
""".split('\n')]
def test(dials_regression, tmpdir):
    """Combine multiple narrow-wedge sweeps, refine jointly, and compare the
    refined models against stored regression results.

    Bug fix: the beam-direction check previously built both unit vectors from
    ``e1``, so the compared angle was always zero; the second vector now comes
    from ``e2``.
    """
    tmpdir.chdir()
    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "multi_narrow_wedges")
    selection = (2, 3, 4, 5, 6, 7, 9, 11, 12, 13, 14, 17, 18, 19, 20)

    # Combine all the separate sweeps
    result = procrunner.run_process([
        "dials.combine_experiments",
        "reference_from_experiment.beam=0",
        "reference_from_experiment.goniometer=0",
        "reference_from_experiment.detector=0",
    ] + [
        "experiments={0}/data/sweep_{1:03d}/experiments.json".format(
            data_dir, n) for n in selection
    ] + [
        "reflections={0}/data/sweep_{1:03d}/reflections.pickle".format(
            data_dir, n) for n in selection
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''

    # Do refinement and load the results
    # turn off outlier rejection so that test takes about 4s rather than 10s
    # set close_to_spindle_cutoff to old default
    result = procrunner.run_process([
        "dials.refine",
        "combined_experiments.json",
        "combined_reflections.pickle",
        "outlier.algorithm=null",
        "close_to_spindle_cutoff=0.05",
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    refined_experiments = ExperimentListFactory.from_json_file(
        "refined_experiments.json", check_format=False)

    # Check results are as expected
    regression_experiments = ExperimentListFactory.from_json_file(
        os.path.join(data_dir, "regression_experiments.json"),
        check_format=False)

    for e1, e2 in zip(refined_experiments, regression_experiments):
        assert e1.crystal.is_similar_to(e2.crystal)
        # FIXME need is_similar_to for detector that checks geometry
        #assert e1.detector == e2.detector
        s0_1 = matrix.col(e1.beam.get_unit_s0())
        s0_2 = matrix.col(e2.beam.get_unit_s0())  # was e1 — compared to itself
        assert s0_1.accute_angle(s0_2, deg=True) < 0.0057  # ~0.1 mrad
def test_combining_spots(dials_regression, tmpdir):
    """Import two halves of a dataset separately, spot-find each, and combine."""
    tmpdir.chdir()
    all_images = sorted(
        glob(
            os.path.join(dials_regression, 'centroid_test_data',
                         "centroid*.cbf")))
    half = int(len(all_images) / 2)
    halves = (all_images[0:half], all_images[half:])

    for idx, subset in enumerate(halves, start=1):
        result = run_process(['dials.import'] + subset +
                             ['output.datablock=datablock-%d.json' % idx])
        assert result['exitcode'] == 0
        assert result['stderr'] == ''
        assert os.path.exists('datablock-%d.json' % idx)

        result = run_process([
            'dials.find_spots', 'datablock-%d.json' % idx,
            'output.reflections=strong-%d.pickle' % idx
        ])
        assert result['exitcode'] == 0
        assert result['stderr'] == ''
        assert os.path.exists('strong-%d.pickle' % idx)

    result = run_process([
        'dials.combine_found_spots',
        'datablock-1.json',
        'datablock-2.json',
        'strong-1.pickle',
        'strong-2.pickle',
        'output.reflections=combined.pickle',
        'output.datablock=combined.json',
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists('combined.json')
    assert os.path.exists('combined.pickle')
def test_rs_mapper(dials_regression, tmpdir):
    """Run dials.rs_mapper and sanity-check the statistics of the CCP4 map."""
    tmpdir.chdir()
    result = procrunner.run_process([
        'dials.rs_mapper',
        os.path.join(dials_regression, "centroid_test_data", "datablock.json"),
        'map_file="junk.ccp4"',
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists('junk.ccp4')

    # load results
    from iotbx import ccp4_map
    from scitbx.array_family import flex
    ccp4 = ccp4_map.map_reader(file_name="junk.ccp4")
    assert len(ccp4.data) == 7189057
    # the header statistics must agree with the statistics of the map data
    assert ccp4.header_min == -1.0
    assert flex.min(ccp4.data) == -1.0
    assert ccp4.header_max == 2052.75
    assert flex.max(ccp4.data) == 2052.75
    assert ccp4.header_mean == pytest.approx(0.018606403842568398, abs=1e-6)
    assert flex.mean(ccp4.data) == pytest.approx(0.018606403842568398,
                                                abs=1e-6)
def test(dials_regression, tmpdir):
    """Apply a lookup mask to a datablock and check it is recorded in the output."""
    tmpdir.chdir()
    data_dir = os.path.join(dials_regression, "centroid_test_data")
    input_filename = os.path.join(data_dir, "datablock.json")
    mask_filename = os.path.join(data_dir, "lookup_mask.pickle")
    output_filename = "output_datablock.json"

    result = procrunner.run_process([
        'dials.apply_mask',
        'input.datablock=%s' % input_filename,
        'input.mask=%s' % mask_filename,
        'output.datablock=%s' % output_filename,
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''

    from dials.array_family import flex  # import dependency
    from dxtbx.datablock import DataBlockFactory
    datablocks = DataBlockFactory.from_json_file(output_filename)
    assert len(datablocks) == 1
    imagesets = datablocks[0].extract_imagesets()
    assert len(imagesets) == 1
    # the mask must be attached to the imageset's external lookup
    assert imagesets[0].external_lookup.mask.filename == mask_filename
def test_thing_1(tmpdir, dials_regression):
    """Run dials.search_beam_position on two trypsin sweeps and check the
    detector origin shift against the known answer."""
    data_dir = os.path.join(dials_regression, "indexing_test_data", "trypsin")
    pickle_path1 = os.path.join(data_dir, "strong_P1_X6_1_0-1.pickle")
    pickle_path2 = os.path.join(data_dir, "strong_P1_X6_2_0-1.pickle")
    datablock_path1 = os.path.join(data_dir, "datablock_P1_X6_1.json")
    datablock_path2 = os.path.join(data_dir, "datablock_P1_X6_2.json")
    tmpdir.chdir()

    args = [
        "dials.search_beam_position", datablock_path1, datablock_path2,
        pickle_path1, pickle_path2
    ]
    print(args)
    result = procrunner.run_process(args)
    assert result['stderr'] == '' and result['exitcode'] == 0
    assert os.path.exists('optimized_datablock.json')

    from dxtbx.serialize import load
    original = load.datablock(datablock_path1, check_format=False)[0]
    optimized = load.datablock('optimized_datablock.json',
                               check_format=False)[0]
    original_detector = original.extract_imagesets()[0].get_detector()
    optimized_detector = optimized.unique_detectors()[0]
    shift = (scitbx.matrix.col(original_detector[0].get_origin()) -
             scitbx.matrix.col(optimized_detector[0].get_origin()))
    assert shift.elems == pytest.approx((0.037, 0.061, 0.0), abs=1e-1)
def test_find_spots_with_hot_mask_with_prefix(dials_regression, tmpdir):
    """Spot-find with write_hot_mask and a custom hot-mask filename prefix."""
    tmpdir.chdir()
    images = glob(
        os.path.join(dials_regression, "centroid_test_data", "centroid*.cbf"))

    # now write a hot mask
    result = procrunner.run_process([
        "dials.find_spots",
        "write_hot_mask=True",
        "hot_mask_prefix=my_hot_mask",
        "output.reflections=spotfinder.pickle",
        "output.shoeboxes=False",
    ] + images)
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists("spotfinder.pickle")
    assert os.path.exists("my_hot_mask_0.pickle")

    with open("spotfinder.pickle", "rb") as f:
        reflections = pickle.load(f)
    assert len(reflections) == 653
    # shoeboxes were explicitly disabled
    assert "shoebox" not in reflections

    with open("my_hot_mask_0.pickle", "rb") as f:
        hot_mask = pickle.load(f)
    assert len(hot_mask) == 1
    assert hot_mask[0].count(False) == 12
def test_find_spots_with_user_defined_mask(dials_regression, tmpdir):
    """Spot-find with a user-supplied lookup mask and check all found spots
    survive the mask's resolution cut-off."""
    tmpdir.chdir()
    data_dir = os.path.join(dials_regression, "centroid_test_data")

    # Now with a user defined mask
    result = procrunner.run_process([
        "dials.find_spots",
        "output.reflections=spotfinder.pickle",
        "output.shoeboxes=True",
        "lookup.mask=" + os.path.join(data_dir, "mask.pickle"),
    ] + glob(os.path.join(data_dir, "centroid*.cbf")))
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists("spotfinder.pickle")

    with open("spotfinder.pickle", "rb") as f:
        reflections = pickle.load(f)

    from dxtbx.datablock import DataBlockFactory
    datablocks = DataBlockFactory.from_json_file(
        os.path.join(data_dir, "datablock.json"))
    assert len(datablocks) == 1
    imageset = datablocks[0].extract_imagesets()[0]
    detector = imageset.get_detector()
    beam = imageset.get_beam()
    # every remaining spot must lie above the mask's 3 Angstrom limit
    for x, y, _ in reflections['xyzobs.px.value']:
        assert detector[0].get_resolution_at_pixel(beam.get_s0(), (x, y)) >= 3
def test_json(dials_regression, tmpdir):
    """Export strong spots to JSON and check the reciprocal-lattice points.

    Bug fix: on Python 3 ``dict.keys()`` is a view and never compares equal
    to a list (and the old list comparison was dict-order dependent even on
    Python 2), so the key check now compares sets.
    """
    tmpdir.chdir()
    # Call dials.export
    result = procrunner.run_process([
        'dials.export',
        'format=json',
        os.path.join(dials_regression, "centroid_test_data", "datablock.json"),
        os.path.join(dials_regression, "centroid_test_data", "strong.pickle"),
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists('rlp.json')

    from dxtbx.datablock import DataBlockFactory
    with open('rlp.json', 'rb') as f:
        d = json.load(f)
    assert set(d) == {'imageset_id', 'datablocks', 'rlp', 'experiment_id'}, \
        d.keys()
    assert d['rlp'][:3] == [0.123454, 0.57687, 0.186465], d['rlp'][:3]
    assert d['imageset_id'][0] == 0
    assert d['experiment_id'][0] == 0
    assert len(d['datablocks']) == 1
    db = DataBlockFactory.from_dict(d['datablocks'])
    imgset = db[0].extract_imagesets()
    assert len(imgset) == 1
def test_json_shortened(dials_regression, tmpdir):
    """Export integrated data to JSON with shortened precision.

    Bug fix: on Python 3 ``dict.keys()`` is a view and never compares equal
    to a list, so the key check now compares sets.
    """
    tmpdir.chdir()
    # Call dials.export
    result = procrunner.run_process([
        'dials.export',
        'format=json',
        os.path.join(dials_regression, "centroid_test_data",
                     "experiments.json"),
        os.path.join(dials_regression, "centroid_test_data",
                     "integrated.pickle"),
        'json.filename=integrated.json',
        'n_digits=4',
        'compact=False',
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists('integrated.json')

    with open('integrated.json', 'rb') as f:
        d = json.load(f)
    assert set(d) == {'imageset_id', 'rlp', 'experiment_id'}, d.keys()
    # values rounded to 4 digits as requested via n_digits
    assert d['rlp'][:3] == [-0.5975, -0.6141, 0.4702], d['rlp'][:3]
    assert d['imageset_id'][0] == 0
    assert d['experiment_id'][0] == 0
def test_multi_lattice(dials_regression, tmpdir):
    """Integrate a two-lattice dataset and check both lattices produce output."""
    tmpdir.chdir()
    data_dir = os.path.join(dials_regression, "integration_test_data",
                            'multi_lattice')

    # Call dials.integrate
    result = procrunner.run_process([
        'dials.integrate',
        os.path.join(data_dir, 'experiments.json'),
        os.path.join(data_dir, 'indexed.pickle'),
        'prediction.padding=0',
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists('integrated.pickle')

    with open('integrated.pickle', 'rb') as fh:
        table = pickle.load(fh)
    assert len(table) == 5605

    # Check output contains reflections from two lattices ...
    assert len(set(table['id'])) == 2

    # ... and that both lattices have profile-integrated reflections
    integrated = table.select(table.get_flags(table.flags.integrated_prf))
    assert len(set(integrated['id'])) == 2
def test_import_integrate_hkl(dials_regression, tmpdir):
    """Import an XDS INTEGRATE.HKL file as a DIALS reflection table."""
    tmpdir.chdir()
    from dials.array_family import flex  # import dependency

    data_dir = os.path.join(dials_regression, "centroid_test_data")
    result = procrunner.run_process([
        'dials.import_xds',
        'input.method=reflections',
        os.path.join(data_dir, 'INTEGRATE.HKL'),
        os.path.join(data_dir, "experiments.json"),
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''

    with open('integrate_hkl.pickle', 'rb') as fh:
        table = pickle.load(fh)

    # all expected columns must have been imported
    for column in ('miller_index', 'id', 'panel', 'xyzcal.px',
                   'xyzobs.px.value', 'intensity.cor.value',
                   'intensity.cor.variance'):
        assert column in table
    assert len(table) == 174911
def test_scan_varying_refinement_of_a_multiple_panel_detector(dials_regression, tmpdir):
    """Scan-varying refinement on i23 data modelled as a 24-panel barrel."""
    from dials.array_family import flex
    tmpdir.chdir()
    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "i23_as_24_panel_barrel")
    result = procrunner.run_process([
        "dials.refine",
        os.path.join(data_dir, 'experiments.json'),
        os.path.join(data_dir, 'indexed.pickle'),
        "scan_varying=true",
        "history=history.pickle",
        "outlier.separate_blocks=False",
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''

    # there are plenty of things we could do with the refinement history, but
    # here just check that final RMSDs are low enough
    with open('history.pickle', 'rb') as f:
        history = pickle.load(f)
    final_rmsd = history['rmsd'][-1]
    for rmsd, limit in zip(final_rmsd, (0.05, 0.04, 0.0002)):
        assert rmsd < limit

    # also check that the used_in_refinement flag got set correctly
    refined = flex.reflection_table.from_pickle('refined.pickle')
    used = refined.get_flags(refined.flags.used_in_refinement)
    assert used.count(True) == history['num_reflections'][-1]
def test_multi_sweep(dials_regression, tmpdir):
    """Integrate a two-sweep dataset and check the sweeps agree.

    Bug fixes: ``assert F1 == F2`` and ``assert flex.abs(I1 - I2) < 1e-6``
    asserted on flex arrays, which are truthy whenever non-empty, so the
    checks could never fail; the comparisons are now reduced explicitly.
    """
    tmpdir.chdir()
    data_dir = os.path.join(dials_regression, "integration_test_data",
                            'multi_sweep')
    # Call dials.integrate
    result = procrunner.run_process([
        'dials.integrate',
        os.path.join(data_dir, 'experiments.json'),
        os.path.join(data_dir, 'indexed.pickle'),
        'prediction.padding=0',
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists('integrated.pickle')

    with open('integrated.pickle', 'rb') as fh:
        table = pickle.load(fh)
    assert len(table) == 4020

    # Check the results: first/second half belong to experiments 0 and 1
    T1 = table[:2010]
    T2 = table[2010:]
    ID1 = list(set(T1['id']))
    ID2 = list(set(T2['id']))
    assert len(ID1) == 1
    assert len(ID2) == 1
    assert ID1[0] == 0
    assert ID2[0] == 1

    I1 = T1['intensity.prf.value']
    I2 = T2['intensity.prf.value']
    F1 = T1.get_flags(T1.flags.integrated_prf)
    F2 = T2.get_flags(T2.flags.integrated_prf)
    # the two sweeps must flag exactly the same reflections ...
    assert list(F1) == list(F2)
    I1 = I1.select(F1)
    I2 = I2.select(F2)
    # ... and give them the same profile-fitted intensities
    assert flex.max(flex.abs(I1 - I2)) < 1e-6
def test_xds_ascii(dials_regression, tmpdir):
    """Export integrated data in XDS_ASCII format and spot-check psi values.

    Fix: the output file is now read through a context manager so the file
    handle is closed deterministically instead of being leaked.
    """
    tmpdir.chdir()
    # Call dials.export
    result = procrunner.run_process([
        'dials.export',
        'summation=true',
        'format=xds_ascii',
        os.path.join(dials_regression, "centroid_test_data",
                     "experiments.json"),
        os.path.join(dials_regression, "centroid_test_data",
                     "integrated.pickle"),
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists("DIALS.HKL")

    # reference psi values for a handful of reflections
    psi_values = {
        (-9, 7, -10): 153.430361,
        (-5, 11, -26): 175.559441,
        (-4, 23, 24): 129.468070,
        (2, 10, 20): 147.947274,
    }

    with open('DIALS.HKL', 'r') as fh:
        for record in fh:
            if record.startswith('!'):
                continue  # header/comment line
            tokens = record.split()
            hkl = tuple(map(int, tokens[:3]))
            if hkl not in psi_values:
                continue
            psi = float(tokens[-1])
            assert psi == pytest.approx(psi_values[hkl], abs=0.1)
def run_process(command):
    """Run a command, print timing info and return the result on success.

    ``command`` is a shell-style command string; it is tokenized with
    ``shlex.split`` before execution.  Raises AssertionError if the command
    exits with a non-zero status.

    Bug fix: the body previously called ``run_process`` (itself) instead of
    ``procrunner.run_process``, causing infinite recursion.
    """
    os.environ["DIALS_NOBANNER"] = "1"
    result = procrunner.run_process(shlex.split(command))
    print("running command took {0:.2f} seconds\n".format(result['runtime']))
    assert result['exitcode'] == 0, "Command execution failed"
    return result
def test_generate_mask_with_untrusted_circle(input_filename):
    """Generate a mask excluding an untrusted circular region."""
    result = procrunner.run_process([
        'dials.generate_mask',
        input_filename,
        'output.mask=mask3.pickle',
        'untrusted.circle=100,100,10',
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists("mask3.pickle")
def test_generate_mask(input_filename):
    """Generate a mask with default parameters only."""
    result = procrunner.run_process(
        ['dials.generate_mask', input_filename])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists("mask.pickle")
def install_pip(**kwargs):
    """Clone a package with install_git, then pip-install it in editable mode.

    Returns True on success, False if either the clone or the pip install
    step fails.
    """
    if not install_git(**kwargs):
        return False
    result = procrunner.run_process(
        ['libtbx.pip', 'install', '-e', kwargs['location']],
        print_stderr=True)
    if result['exitcode']:
        return False
    return True
def test_slow_fast_beam_centre(dials_regression, tmpdir):
    """Import a multi-panel CS-PAD image with slow_fast_beam_centre and check
    the beam centre lands on the requested panel without distorting the
    relative panel geometry."""
    tmpdir.chdir()

    from dxtbx.serialize import load
    from scitbx import matrix

    def panel_offsets(detector):
        # distance of each panel origin from the first panel's origin
        reference = matrix.col(detector[0].get_origin())
        return [(reference - matrix.col(p.get_origin())).length()
                for p in detector]

    # test slow_fast_beam_centre with a multi-panel CS-PAD image
    impath = os.path.join(dials_regression, "image_examples",
                          "LCLS_cspad_nexus", "idx-20130301060858401.cbf")
    result = run_process([
        'dials.import', 'slow_fast_beam_centre=134,42,18',
        'output.datablock=slow_fast_beam_centre.json', impath
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists('slow_fast_beam_centre.json')

    datablock = load.datablock('slow_fast_beam_centre.json')[0]
    imgset = datablock.extract_imagesets()[0]

    # beam centre on 18th panel
    s0 = imgset.get_beam().get_s0()
    beam_centre = imgset.get_detector()[18].get_beam_centre_px(s0)
    assert beam_centre == pytest.approx((42, 134))

    # check relative panel positions have not changed
    offsets = panel_offsets(imgset.get_detector())

    result = run_process(
        ['dials.import', 'output.datablock=reference.json', impath])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists('reference.json')

    ref_db = load.datablock('reference.json')[0]
    ref_imset = ref_db.extract_imagesets()[0]
    assert offsets == pytest.approx(panel_offsets(ref_imset.get_detector()))
def test_generate_mask_with_ice_rings(input_filename):
    """Generate a mask with ice-ring filtering enabled."""
    result = procrunner.run_process([
        'dials.generate_mask',
        input_filename,
        'output.mask=mask6.pickle',
        'ice_rings{filter=True;d_min=2}',
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists("mask6.pickle")
def test_generate_mask_with_resolution_range(input_filename):
    """Generate a mask excluding a resolution range."""
    result = procrunner.run_process([
        'dials.generate_mask',
        input_filename,
        'output.mask=mask4.pickle',
        'resolution_range=2,3',
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists("mask4.pickle")
def test_multi_process_refinement_gives_same_results_as_single_process_refinement(
        dials_regression, tmpdir):
    """Refinement with nproc=4 must produce the same models as nproc=1."""
    tmpdir.chdir()
    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "multi_stills")
    base_cmd = [
        "dials.refine",
        os.path.join(data_dir, "combined_experiments.json"),
        os.path.join(data_dir, "combined_reflections.pickle"),
        "outlier.algorithm=null",
        "engine=LBFGScurvs",
        "output.reflections=None",
    ]

    for nproc in (4, 1):
        result = procrunner.run_process(base_cmd + [
            "output.experiments=refined_experiments_nproc%d.json" % nproc,
            "nproc=%d" % nproc,
        ])
        assert result['exitcode'] == 0
        assert result['stderr'] == ''

    # load results
    nproc1 = ExperimentListFactory.from_json_file(
        "refined_experiments_nproc1.json", check_format=False)
    nproc4 = ExperimentListFactory.from_json_file(
        "refined_experiments_nproc4.json", check_format=False)

    # compare results
    for b1, b2 in zip(nproc1.beams(), nproc4.beams()):
        assert b1.is_similar_to(b2)
    for c1, c2 in zip(nproc1.crystals(), nproc4.crystals()):
        assert c1.is_similar_to(c2)
    for d1, d2 in zip(nproc1.detectors(), nproc4.detectors()):
        assert d1.is_similar_to(d2,
                                fast_axis_tolerance=5e-5,
                                slow_axis_tolerance=5e-5,
                                origin_tolerance=5e-5)
def test_generate_mask_with_d_min_d_max(input_filename):
    """Generate a mask with explicit d_min/d_max limits.

    NOTE(review): d_min=3 with d_max=2 looks inverted for a pass-band, but is
    passed through unchanged here — presumably the tool masks the band between
    the two limits; confirm against dials.generate_mask semantics.
    """
    result = procrunner.run_process([
        'dials.generate_mask',
        input_filename,
        'output.mask=mask5.pickle',
        'd_min=3',
        'd_max=2',
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists("mask5.pickle")
def test(dials_regression, tmpdir):
    """Smoke-test dials.plot_scan_varying_model on refined experiments."""
    tmpdir.chdir()
    experiments_path = os.path.join(
        dials_regression, "refinement_test_data", "multi_sweep_one_sample",
        "glucose_isomerase", "SWEEP1", "index", "sv_refined_experiments.json")
    result = procrunner.run_process(
        ['dials.plot_scan_varying_model', experiments_path])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''