def tst_import_beam_centre(self): from glob import glob import os from libtbx import easy_run from dxtbx.serialize import load # Find the image files image_files = glob(os.path.join(self.path, "centroid*.cbf")) image_files = ' '.join(image_files) # provide mosflm beam centre to dials.import cmd = 'dials.import %s mosflm_beam_centre=100,200 output.datablock=mosflm_beam_centre.json' %image_files easy_run.fully_buffered(cmd) assert os.path.exists("mosflm_beam_centre.json") datablock = load.datablock("mosflm_beam_centre.json")[0] imgset = datablock.extract_imagesets()[0] beam_centre = imgset.get_detector()[0].get_beam_centre(imgset.get_beam().get_s0()) assert approx_equal(beam_centre, (200,100)) # provide an alternative datablock.json to get geometry from cmd = 'dials.import %s reference_geometry=mosflm_beam_centre.json output.datablock=mosflm_beam_centre2.json' %image_files easy_run.fully_buffered(cmd) assert os.path.exists("mosflm_beam_centre2.json") datablock = load.datablock("mosflm_beam_centre2.json")[0] imgset = datablock.extract_imagesets()[0] beam_centre = imgset.get_detector()[0].get_beam_centre(imgset.get_beam().get_s0()) assert approx_equal(beam_centre, (200,100)) print 'OK'
def tst_import_beam_centre(self):
    """Check dials.import honours mosflm_beam_centre, both given directly
    and indirectly via reference_geometry from a previous import."""
    from glob import glob
    import os
    from libtbx import easy_run
    from dxtbx.serialize import load

    # Find the image files
    image_files = glob(os.path.join(self.path, "centroid*.cbf"))
    image_files = ' '.join(image_files)

    # provide mosflm beam centre to dials.import
    cmd = 'dials.import %s mosflm_beam_centre=100,200 output.datablock=mosflm_beam_centre.json' %image_files
    easy_run.fully_buffered(cmd).raise_if_errors()
    assert os.path.exists("mosflm_beam_centre.json")
    datablock = load.datablock("mosflm_beam_centre.json")[0]
    imgset = datablock.extract_imagesets()[0]
    beam_centre = imgset.get_detector()[0].get_beam_centre(imgset.get_beam().get_s0())
    # input 100,200 is expected back as (200, 100): the two conventions
    # swap the axis order
    assert approx_equal(beam_centre, (200,100))

    # provide an alternative datablock.json to get geometry from
    cmd = 'dials.import %s reference_geometry=mosflm_beam_centre.json output.datablock=mosflm_beam_centre2.json' %image_files
    easy_run.fully_buffered(cmd).raise_if_errors()
    assert os.path.exists("mosflm_beam_centre2.json")
    datablock = load.datablock("mosflm_beam_centre2.json")[0]
    imgset = datablock.extract_imagesets()[0]
    beam_centre = imgset.get_detector()[0].get_beam_centre(imgset.get_beam().get_s0())
    assert approx_equal(beam_centre, (200,100))
    print 'OK'
def exercise_1():
    # Run dials.discover_better_experimental_model on two trypsin
    # datablocks plus their strong-spot pickles, then check the optimized
    # detector origin has shifted by the expected amount.
    if not have_dials_regression:
        print "Skipping exercise(): dials_regression not available."
        return

    data_dir = os.path.join(dials_regression, "indexing_test_data", "trypsin")
    pickle_path1 = os.path.join(data_dir, "strong_P1_X6_1_0-1.pickle")
    pickle_path2 = os.path.join(data_dir, "strong_P1_X6_2_0-1.pickle")
    datablock_path1 = os.path.join(data_dir, "datablock_P1_X6_1.json")
    datablock_path2 = os.path.join(data_dir, "datablock_P1_X6_2.json")
    args = [
        "dials.discover_better_experimental_model", datablock_path1,
        datablock_path2, pickle_path1, pickle_path2
    ]
    command = " ".join(args)
    print command
    # Work in a temporary directory so output files do not pollute cwd
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory()
    os.chdir(tmp_dir)
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    assert os.path.exists('optimized_datablock.json')
    from dxtbx.serialize import load
    datablocks = load.datablock(datablock_path1, check_format=False)
    original_imageset = datablocks[0].extract_imagesets()[0]
    optimized_datablock = load.datablock('optimized_datablock.json',
                                         check_format=False)
    detector_1 = original_imageset.get_detector()
    detector_2 = optimized_datablock[0].unique_detectors()[0]
    # Shift of the first panel origin introduced by the search
    shift = (matrix.col(detector_1[0].get_origin()) -
             matrix.col(detector_2[0].get_origin()))
    assert approx_equal(shift.elems, (0.037, 0.061, 0.0), eps=1e-1)
    os.chdir(cwd)
def exercise_3():
    # Run dials.discover_better_experimental_model on the phi_scan data set
    # and check the optimized detector origin shift.
    if not have_dials_regression:
        print "Skipping exercise(): dials_regression not available."
        return

    data_dir = os.path.join(dials_regression, "indexing_test_data", "phi_scan")
    pickle_path = os.path.join(data_dir, "strong.pickle")
    datablock_path = os.path.join(data_dir, "datablock.json")
    args = ["dials.discover_better_experimental_model", datablock_path,
            pickle_path]
    command = " ".join(args)
    print command
    # Work in a temporary directory so output files do not pollute cwd
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory()
    os.chdir(tmp_dir)
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    assert os.path.exists('optimized_datablock.json')
    from dxtbx.serialize import load
    datablocks = load.datablock(datablock_path, check_format=False)
    original_imageset = datablocks[0].extract_imagesets()[0]
    optimized_datablock = load.datablock('optimized_datablock.json',
                                         check_format=False)
    detector_1 = original_imageset.get_detector()
    detector_2 = optimized_datablock[0].unique_detectors()[0]
    # Shift of the first panel origin introduced by the search
    shift = (matrix.col(detector_1[0].get_origin()) -
             matrix.col(detector_2[0].get_origin()))
    assert approx_equal(shift.elems, (-1.1091, 2.1695, 0.0), eps=1e-1)
    os.chdir(cwd)
def test_thing_2(tmpdir, xia2_regression_build):
    '''Would you like to know more about what this test is supposed to do?
    I would love to. Always remember to use descriptive names.'''
    # End-to-end beam-centre search on the i04 bag-training images:
    # import with a deliberately offset beam centre, find spots on a subset
    # of images, run dials.search_beam_position, then confirm the optimized
    # datablock still indexes with the expected unit cell.
    data_dir = os.path.join(xia2_regression_build, "test_data",
                            "i04_bag_training")
    g = glob.glob(os.path.join(data_dir, "*.cbf*"))
    if not g:
        pytest.skip(
            'xia2_regression files not downloaded. Run xia2_regression.fetch_test_data first'
        )
    tmpdir.chdir()

    # beam centre from image headers: 205.28,210.76 mm
    args = ["dials.import", "mosflm_beam_centre=207,212"] + g
    print(args)
    result = procrunner.run_process(args)
    assert result['stderr'] == '' and result['exitcode'] == 0
    assert os.path.exists('datablock.json')

    # spot-finding, just need a subset of the data
    args = [
        "dials.find_spots", "datablock.json", "scan_range=1,10",
        "scan_range=531,540"
    ]
    print(args)
    result = procrunner.run_process(args)
    assert result['stderr'] == '' and result['exitcode'] == 0
    assert os.path.exists('strong.pickle')

    # actually run the beam centre search
    args = ["dials.search_beam_position", "datablock.json", "strong.pickle"]
    print(args)
    result = procrunner.run_process(args)
    assert result['stderr'] == '' and result['exitcode'] == 0
    assert os.path.exists('optimized_datablock.json')

    # look at the results
    from dxtbx.serialize import load
    datablocks = load.datablock("datablock.json", check_format=False)
    original_imageset = datablocks[0].extract_imagesets()[0]
    optimized_datablock = load.datablock('optimized_datablock.json',
                                         check_format=False)
    detector_1 = original_imageset.get_detector()
    detector_2 = optimized_datablock[0].unique_detectors()[0]
    # Shift of the first panel origin introduced by the search
    shift = (scitbx.matrix.col(detector_1[0].get_origin()) -
             scitbx.matrix.col(detector_2[0].get_origin()))
    print(shift)

    # check we can actually index the resulting optimized datablock
    from cctbx import uctbx
    from dials.test.algorithms.indexing.test_index import run_one_indexing
    expected_unit_cell = uctbx.unit_cell(
        (57.780, 57.800, 150.017, 89.991, 89.990, 90.007))
    expected_rmsds = (0.06, 0.05, 0.001)
    expected_hall_symbol = ' P 1'
    run_one_indexing((tmpdir / 'strong.pickle').strpath,
                     (tmpdir / 'optimized_datablock.json').strpath, [],
                     expected_unit_cell, expected_rmsds,
                     expected_hall_symbol)
def test_thing_1(tmpdir, dials_regression):
    '''Would you like to know more about what this test is supposed to do?
    I would love to. Always remember to use descriptive names.'''
    # Beam-centre search across two trypsin datablocks; the optimized
    # detector origin should shift by a small, known amount.
    data_dir = os.path.join(dials_regression, "indexing_test_data", "trypsin")
    pickle_path1 = os.path.join(data_dir, "strong_P1_X6_1_0-1.pickle")
    pickle_path2 = os.path.join(data_dir, "strong_P1_X6_2_0-1.pickle")
    datablock_path1 = os.path.join(data_dir, "datablock_P1_X6_1.json")
    datablock_path2 = os.path.join(data_dir, "datablock_P1_X6_2.json")
    tmpdir.chdir()
    args = [
        "dials.search_beam_position", datablock_path1, datablock_path2,
        pickle_path1, pickle_path2
    ]
    print(args)
    result = procrunner.run_process(args)
    assert result['stderr'] == '' and result['exitcode'] == 0
    assert os.path.exists('optimized_datablock.json')
    from dxtbx.serialize import load
    datablocks = load.datablock(datablock_path1, check_format=False)
    original_imageset = datablocks[0].extract_imagesets()[0]
    optimized_datablock = load.datablock('optimized_datablock.json',
                                         check_format=False)
    detector_1 = original_imageset.get_detector()
    detector_2 = optimized_datablock[0].unique_detectors()[0]
    # Shift of the first panel origin introduced by the search
    shift = (scitbx.matrix.col(detector_1[0].get_origin()) -
             scitbx.matrix.col(detector_2[0].get_origin()))
    assert shift.elems == pytest.approx((0.037, 0.061, 0.0), abs=1e-1)
def tst_multiple_sweeps(self): from glob import glob import os from libtbx import easy_run from dxtbx.serialize import load # Find the image files image_files = sorted(glob(os.path.join(self.path, "centroid*.cbf"))) del image_files[4] # Delete filename to force two sweeps image_files = ' '.join(image_files) # provide mosflm beam centre to dials.import cmd = 'dials.import %s output.datablock=datablock_multiple_sweeps.json' % (image_files) try: easy_run.fully_buffered(cmd).raise_if_errors() assert False, "Expected exception" except Exception: pass cmd = 'dials.import %s output.datablock=datablock_multiple_sweeps.json allow_multiple_sweeps=True' % (image_files) easy_run.fully_buffered(cmd).raise_if_errors() assert os.path.exists("datablock_multiple_sweeps.json") datablock = load.datablock("datablock_multiple_sweeps.json")[0] imgset = datablock.extract_imagesets() assert len(imgset) == 2 print 'OK'
def load_reference_geometry(self, params):
    ''' Load a reference geometry file.

    Reads params.input.reference_geometry as an experiment list if
    possible, falling back to the datablock format, and extracts a single
    reference detector, beam and goniometer.

    Raises Sorry if more than one detector or beam is defined.

    Fixes: removed the unused `from collections import namedtuple` and the
    unused exception binding in the Python-2-only `except Exception, e`
    form (now the version-portable `except Exception:`).
    '''
    # Load reference geometry
    reference_detector = None
    reference_beam = None
    if params.input.reference_geometry is not None:
        from dxtbx.serialize import load
        experiments, datablock = None, None
        try:
            experiments = load.experiment_list(
                params.input.reference_geometry, check_format=False)
        except Exception:
            # Not an experiment list - try the datablock format instead
            datablock = load.datablock(params.input.reference_geometry)
        assert experiments or datablock, 'Could not import reference geometry'
        if experiments:
            assert len(experiments.detectors()) >= 1
            assert len(experiments.beams()) >= 1
            if len(experiments.detectors()) > 1:
                raise Sorry('The reference geometry file contains %d detector definitions, but only a single definition is allowed.' % len(experiments.detectors()))
            if len(experiments.beams()) > 1:
                raise Sorry('The reference geometry file contains %d beam definitions, but only a single definition is allowed.' % len(experiments.beams()))
            reference_detector = experiments.detectors()[0]
            reference_beam = experiments.beams()[0]
            # NOTE(review): goniometers() is indexed without a count check,
            # unlike detectors/beams above — confirm reference files always
            # define at least one goniometer.
            reference_goniometer = experiments.goniometers()[0]
        else:
            assert len(datablock) == 1
            imageset = datablock[0].extract_imagesets()[0]
            reference_detector = imageset.get_detector()
            reference_beam = imageset.get_beam()
            reference_goniometer = imageset.get_goniometer()
def run():
    # Generate synthetic strong spots for a random crystal model in a
    # randomly chosen space group, add noise, and check they index back to
    # the known model.
    #space_group = sgtbx.space_group_info(number=random.randint(1,230)).group()
    space_group = sgtbx.space_group_info(
        number=random.choice((1,3,16,75,143,195))).group()
    unit_cell = any_compatible_unit_cell(
        space_group, volume=random.uniform(1e3, 1e6))
    fname = os.path.join(dials_regression, "centroid_test_data",
                         "datablock.json")
    datablock = load.datablock(fname, check_format=False)
    imageset = datablock[0].extract_imagesets()[0]
    scan = imageset.get_scan()
    # Widen the scan range before generating spots
    scan.set_image_range((1,900))
    crystal_model = generate_crystal(unit_cell, space_group)
    print crystal_model
    print unit_cell.minimum_cell()
    strong_spots = generate_spots(
        crystal_model,
        imageset.get_detector(),
        imageset.get_beam(),
        goniometer=imageset.get_goniometer(),
        scan=scan,
        sel_fraction=0.25)
    rmsds = (0.5, 0.5, 0.5) # px/image
    #print strong_spots[0]['xyzobs.px.value']
    add_random_noise_xyz(datablock, strong_spots, rmsds)
    #print strong_spots[0]['xyzobs.px.value']
    run_indexing(datablock, strong_spots, crystal_model, rmsds)
    print "OK"
def run():
    # Generate synthetic strong spots for a random crystal model in a
    # randomly chosen space group, add noise, and check they index back to
    # the known model.
    #space_group = sgtbx.space_group_info(number=random.randint(1,230)).group()
    space_group = sgtbx.space_group_info(
        number=random.choice((1, 3, 16, 75, 143, 195))).group()
    unit_cell = any_compatible_unit_cell(space_group,
                                         volume=random.uniform(1e3, 1e6))
    fname = os.path.join(dials_regression, "centroid_test_data",
                         "datablock.json")
    datablock = load.datablock(fname, check_format=False)
    imageset = datablock[0].extract_imagesets()[0]
    scan = imageset.get_scan()
    # Widen the scan range before generating spots
    scan.set_image_range((1, 900))
    crystal_model = generate_crystal(unit_cell, space_group)
    print crystal_model
    print unit_cell.minimum_cell()
    strong_spots = generate_spots(crystal_model,
                                  imageset.get_detector(),
                                  imageset.get_beam(),
                                  goniometer=imageset.get_goniometer(),
                                  scan=scan,
                                  sel_fraction=0.25)
    rmsds = (0.5, 0.5, 0.5)  # px/image
    #print strong_spots[0]['xyzobs.px.value']
    add_random_noise_xyz(datablock, strong_spots, rmsds)
    #print strong_spots[0]['xyzobs.px.value']
    run_indexing(datablock, strong_spots, crystal_model, rmsds)
    print "OK"
def test_index_synthetic(space_group, unit_cell_volume, dials_regression,
                         tmpdir):
    """Generate noisy synthetic spots for a random crystal in the given
    space group and check they can be indexed back to that model."""
    tmpdir.chdir()
    sg = sgtbx.space_group_info(symbol=space_group).group()
    cell = any_compatible_unit_cell(sg, volume=unit_cell_volume)
    db_path = os.path.join(dials_regression, "centroid_test_data",
                           "datablock.json")
    db = load.datablock(db_path, check_format=False)
    iset = db[0].extract_imagesets()[0]
    scan = iset.get_scan()
    # widen the scan range before generating spots
    scan.set_image_range((1, 900))
    crystal_model = generate_crystal(cell, sg)
    print(crystal_model)
    print(cell.minimum_cell())
    strong_spots = generate_spots(
        crystal_model,
        iset.get_detector(),
        iset.get_beam(),
        goniometer=iset.get_goniometer(),
        scan=scan,
        sel_fraction=0.25,
    )
    rmsds = (0.5, 0.5, 0.5)  # px/image
    add_random_noise_xyz(db, strong_spots, rmsds)
    run_indexing(db, strong_spots, crystal_model, rmsds)
def test_search_small_molecule(dials_data, run_in_tmpdir):
    """Perform a beam-centre search on a multi-sweep data set.

    Do the following:
    1. Run dials.search_beam_position on a single datablock and pickled
       reflection table containing multiple experiment IDs, as output by
       dials.find_spots;
       a) Check that the program exits correctly;
       b) Check that it produces the expected output experiment list.
    2. Check that the beam centre search has resulted in the expected shift
       in detector origin.
    """
    data = dials_data("l_cysteine_dials_output")
    datablock_path = data.join("datablock.json").strpath
    pickle_path = data.join("strong.pickle").strpath
    args = ["dials.search_beam_position", datablock_path, pickle_path]
    print(args)
    result = procrunner.run(args)
    assert not result.returncode and not result.stderr
    assert os.path.exists("optimised.expt")
    from dxtbx.serialize import load
    datablocks = load.datablock(datablock_path, check_format=False)
    original_imageset = datablocks[0].extract_imagesets()[0]
    detector_1 = original_imageset.get_detector()
    optimized_experiments = load.experiment_list("optimised.expt",
                                                 check_format=False)
    detector_2 = optimized_experiments[0].detector
    # Difference between original and optimised first-panel origins
    shift = scitbx.matrix.col(detector_1[0].get_origin()) - scitbx.matrix.col(
        detector_2[0].get_origin())
    print(shift)
    assert shift.elems == pytest.approx((0.11, -1.03, 0.0), abs=1e-1)
def load_reference_geometries(geometry_file_list):
    """Load the reference detector and beam from each geometry file.

    Each file may be either an experiment list or (fallback) a datablock;
    exactly one detector and one beam must be defined in each.

    Returns a list of dicts with keys 'detector', 'beam' and 'file'.
    Raises Exception if any pair of reference geometries is too similar.

    Fix: the loop variable was named `file`, shadowing the builtin.
    """
    from dxtbx.serialize import load
    reference_components = []
    for geometry_file in geometry_file_list:
        try:
            experiments = load.experiment_list(geometry_file,
                                               check_format=False)
            assert len(experiments.detectors()) == 1
            assert len(experiments.beams()) == 1
            reference_detector = experiments.detectors()[0]
            reference_beam = experiments.beams()[0]
        except Exception:
            # Not an experiment list - fall back to the datablock format
            datablock = load.datablock(geometry_file)
            assert len(datablock) == 1
            imageset = datablock[0].extract_imagesets()[0]
            reference_detector = imageset.get_detector()
            reference_beam = imageset.get_beam()
        reference_components.append({
            'detector': reference_detector,
            'beam': reference_beam,
            'file': geometry_file
        })

    # Reject sets in which any two reference geometries are too alike to be
    # distinguished when matching sweeps to geometries
    import itertools
    for combination in itertools.combinations(reference_components, 2):
        if compare_geometries(combination[0]['detector'],
                              combination[1]['detector']):
            from xia2.Handlers.Streams import Chatter
            Chatter.write(
                'Reference geometries given in %s and %s are too similar' %
                (combination[0]['file'], combination[1]['file']))
            raise Exception('Reference geometries too similar')
    return reference_components
def test_slow_fast_beam_centre(dials_regression, tmpdir): tmpdir.chdir() # test slow_fast_beam_centre with a multi-panel CS-PAD image impath = os.path.join(dials_regression, "image_examples", "LCLS_cspad_nexus", "idx-20130301060858401.cbf") result = run_process([ 'dials.import', 'slow_fast_beam_centre=134,42,18', 'output.datablock=slow_fast_beam_centre.json', impath ]) assert result['exitcode'] == 0 assert result['stderr'] == '' assert os.path.exists('slow_fast_beam_centre.json') from dxtbx.serialize import load datablock = load.datablock('slow_fast_beam_centre.json')[0] imgset = datablock.extract_imagesets()[0] # beam centre on 18th panel s0 = imgset.get_beam().get_s0() beam_centre = imgset.get_detector()[18].get_beam_centre_px(s0) assert beam_centre == pytest.approx((42, 134)) # check relative panel positions have not changed from scitbx import matrix o = matrix.col(imgset.get_detector()[0].get_origin()) offsets = [] for p in imgset.get_detector(): intra_pnl = o - matrix.col(p.get_origin()) offsets.append(intra_pnl.length()) result = run_process( ['dials.import', 'output.datablock=reference.json', impath]) assert result['exitcode'] == 0 assert result['stderr'] == '' assert os.path.exists('reference.json') ref_db = load.datablock('reference.json')[0] ref_imset = ref_db.extract_imagesets()[0] o = matrix.col(ref_imset.get_detector()[0].get_origin()) ref_offsets = [] for p in ref_imset.get_detector(): intra_pnl = o - matrix.col(p.get_origin()) ref_offsets.append(intra_pnl.length()) assert offsets == pytest.approx(ref_offsets)
def test_map_centroids_to_reciprocal_space(dials_regression):
    # Regression test for centroid pixel->mm conversion and mapping to
    # reciprocal space, covering both the rotation-scan and the still
    # (no scan/goniometer) code paths, with pinned numeric values.
    data_dir = os.path.join(dials_regression, "indexing_test_data",
                            "i04_weak_data")
    pickle_path = os.path.join(data_dir, "full.pickle")
    sweep_path = os.path.join(data_dir, "datablock_orig.json")
    refl = flex.reflection_table.from_file(pickle_path)
    datablock = load.datablock(sweep_path, check_format=False)[0]
    imageset = datablock.extract_imagesets()[0]
    detector = imageset.get_detector()
    scan = imageset.get_scan()
    beam = imageset.get_beam()
    goniometer = imageset.get_goniometer()

    # check mm values not in
    assert "xyzobs.mm.value" not in refl
    refl.centroid_px_to_mm(detector, scan=scan)
    for k in ("xyzobs.mm.value", "xyzobs.mm.variance"):
        assert k in refl
    assert refl["xyzobs.mm.value"][0] == pytest.approx(
        (199.43400000000003, 11.908133333333334, 1.4324789835743459))
    assert refl["xyzobs.mm.variance"][0] == pytest.approx(
        (0.0035346345381526106, 0.0029881028112449803, 5.711576621000785e-07))

    refl.map_centroids_to_reciprocal_space(detector, beam,
                                           goniometer=goniometer)
    for k in ("s1", "rlp"):
        assert k in refl
    assert refl["s1"][0] == pytest.approx(
        (-0.035321308540942425, 0.6030297672949761, -0.8272574664632307))
    assert refl["rlp"][0] == pytest.approx(
        (-0.035321308540942425, 0.27833194706770875, -0.5700990597173606))

    # select only those centroids on the first image
    sel = refl["xyzobs.px.value"].parts()[2] < 1
    refl1 = refl.select(sel)
    del refl1["xyzobs.mm.value"], refl1["xyzobs.mm.variance"], refl1[
        "s1"], refl1["rlp"]

    # pretend this is a still and hence no scan or goniometer
    refl1.centroid_px_to_mm(detector, scan=None)
    refl1.map_centroids_to_reciprocal_space(detector, beam, goniometer=None)
    assert refl1["s1"][0] == pytest.approx(
        (-0.035321308540942425, 0.6030297672949761, -0.8272574664632307))
    # numbers for rlp are different to above since for the goniometer case the
    # starting angle of the first image is non-zero, so the rlps are rotated back
    # to zero degrees
    assert refl1["rlp"][0] == pytest.approx(
        (-0.035321308540942425, 0.6030297672949761, 0.19707031842793443))
def run(file_names):
    """Load a single sequence (from a serialized datablock JSON or from raw
    image files) and print element [1] of the detector/beam-s0 ray
    intersection."""
    single_json_input = len(file_names) == 1 and file_names[0].endswith("json")
    if single_json_input:
        blocks = load.datablock(file_names[0])
        assert len(blocks) == 1
        sequence = blocks[0].extract_sequences()[0]
    else:
        sequence = ImageSetFactory.new(file_names)[0]
    detector = sequence.get_detector()
    beam = sequence.get_beam()
    print(detector.get_ray_intersection(beam.get_s0())[1])
def exercise_1():
    # Beam-centre discovery on two trypsin datablocks; checks the detector
    # origin shift, with a documented ambiguity in the result (see the
    # trailing comments).
    if not have_dials_regression:
        print "Skipping exercise(): dials_regression not available."
        return

    data_dir = os.path.join(dials_regression, "indexing_test_data", "trypsin")
    pickle_path1 = os.path.join(data_dir, "strong_P1_X6_1_0-1.pickle")
    pickle_path2 = os.path.join(data_dir, "strong_P1_X6_2_0-1.pickle")
    datablock_path1 = os.path.join(data_dir, "datablock_P1_X6_1.json")
    datablock_path2 = os.path.join(data_dir, "datablock_P1_X6_2.json")
    args = ["dials.discover_better_experimental_model",
            datablock_path1, datablock_path2, pickle_path1, pickle_path2]
    command = " ".join(args)
    print command
    # Work in a temporary directory so output files do not pollute cwd
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory()
    os.chdir(tmp_dir)
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    assert os.path.exists('optimized_datablock.json')
    from dxtbx.serialize import load
    datablocks = load.datablock(datablock_path1, check_format=False)
    original_imageset = datablocks[0].extract_imagesets()[0]
    optimized_datablock = load.datablock('optimized_datablock.json',
                                         check_format=False)
    detector_1 = original_imageset.get_detector()
    detector_2 = optimized_datablock[0].unique_detectors()[0]
    shift = (matrix.col(detector_1[0].get_origin()) -
             matrix.col(detector_2[0].get_origin()))
    assert approx_equal(shift.elems, (-0.242, -0.125, 0.0), eps=1e-2)
    # This produces these two different solutions.
    # The two corresponding origin vectors are:
    #   "origin": [ -208.507324496093, 209.20518907699287, -266.11 ]
    #   "origin": [ -208.50831812992388, 209.20211805759828, -266.11 ]
    # The remainder of the optimized_datablock.json is identical.
    #
    # TODO: I don't know if both of these are legitimate, or if
    # this is a bug in discover_better_experimental_model.
    os.chdir(cwd)
def exercise_1():
    # Beam-centre discovery on two trypsin datablocks; checks the detector
    # origin shift, with a documented ambiguity in the result (see the
    # trailing comments).
    if not have_dials_regression:
        print "Skipping exercise(): dials_regression not available."
        return

    data_dir = os.path.join(dials_regression, "indexing_test_data", "trypsin")
    pickle_path1 = os.path.join(data_dir, "strong_P1_X6_1_0-1.pickle")
    pickle_path2 = os.path.join(data_dir, "strong_P1_X6_2_0-1.pickle")
    datablock_path1 = os.path.join(data_dir, "datablock_P1_X6_1.json")
    datablock_path2 = os.path.join(data_dir, "datablock_P1_X6_2.json")
    args = [
        "dials.discover_better_experimental_model", datablock_path1,
        datablock_path2, pickle_path1, pickle_path2
    ]
    command = " ".join(args)
    print command
    # Work in a temporary directory so output files do not pollute cwd
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory()
    os.chdir(tmp_dir)
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    assert os.path.exists('optimized_datablock.json')
    from dxtbx.serialize import load
    datablocks = load.datablock(datablock_path1, check_format=False)
    original_imageset = datablocks[0].extract_imagesets()[0]
    optimized_datablock = load.datablock('optimized_datablock.json',
                                         check_format=False)
    detector_1 = original_imageset.get_detector()
    detector_2 = optimized_datablock[0].unique_detectors()[0]
    shift = (matrix.col(detector_1[0].get_origin()) -
             matrix.col(detector_2[0].get_origin()))
    assert approx_equal(shift.elems, (-0.242, -0.125, 0.0), eps=1e-2)
    # This produces these two different solutions.
    # The two corresponding origin vectors are:
    #   "origin": [ -208.507324496093, 209.20518907699287, -266.11 ]
    #   "origin": [ -208.50831812992388, 209.20211805759828, -266.11 ]
    # The remainder of the optimized_datablock.json is identical.
    #
    # TODO: I don't know if both of these are legitimate, or if
    # this is a bug in discover_better_experimental_model.
    os.chdir(cwd)
def run(file_names):
    """Load a single sweep (from a serialized datablock JSON or from raw
    image files) and emit the corresponding XDS input via xds.to_xds."""
    if len(file_names) != 1 or not file_names[0].endswith('json'):
        from dxtbx.imageset import ImageSetFactory
        sweep = ImageSetFactory.new(file_names)[0]
    else:
        from dxtbx.serialize import load
        blocks = load.datablock(file_names[0])
        assert len(blocks) == 1
        sweep = blocks[0].extract_sweeps()[0]
    xds.to_xds(sweep).XDS_INP()
def run(file_names): if len(file_names) == 1 and file_names[0].endswith('json'): from dxtbx.serialize import load datablock = load.datablock(file_names[0]) assert (len(datablock) == 1) sweep = datablock[0].extract_sweeps()[0] else: from dxtbx.imageset import ImageSetFactory sweep = ImageSetFactory.new(file_names)[0] detector = sweep.get_detector() beam = sweep.get_beam() print detector.get_ray_intersection(beam.get_s0())[1]
def run(file_names): if len(file_names) == 1 and file_names[0].endswith('json'): from dxtbx.serialize import load datablock = load.datablock(file_names[0]) assert(len(datablock) == 1) sweep = datablock[0].extract_sweeps()[0] else: from dxtbx.imageset import ImageSetFactory sweep = ImageSetFactory.new(file_names)[0] detector = sweep.get_detector() beam = sweep.get_beam() print detector.get_ray_intersection(beam.get_s0())[1]
def run(file_names):
    # Load a single sweep from a JSON file, accepting either the datablock
    # or the experiment-list serialization format.
    if len(file_names) == 1 and file_names[0].endswith('json'):
        from dxtbx.serialize import load
        try:
            datablock = load.datablock(file_names[0])
            assert len(datablock) == 1
            sweep = datablock[0].extract_sweeps()[0]
        except ValueError, e:  # Python 2 comma-except syntax
            if str(e) == '"__id__" does not equal "imageset"':
                # Not a datablock: retry as an experiment list
                experiments = load.experiment_list(file_names[0])
                assert len(experiments) == 1
                sweep = experiments[0].imageset
            else:
                raise
    # NOTE(review): no non-JSON branch here and `sweep` is unused in the
    # visible code — this function appears truncated; confirm against the
    # full file.
def tst_import_beam_centre(dials_regression, tmpdir):
    """Check dials.import honours mosflm_beam_centre, both given directly
    and indirectly via reference_geometry from a previous import.

    Fix: image_files was collapsed to a single space-joined string and then
    used in `[...] + image_files`, which raises TypeError (list + str).
    run_process takes an argv list, so the file names are kept as a list.
    """
    tmpdir.chdir()

    # Find the image files (as a list of argv entries)
    image_files = glob(
        os.path.join(dials_regression, "centroid_test_data",
                     "centroid*.cbf"))

    # provide mosflm beam centre to dials.import
    result = run_process([
        'dials.import', 'mosflm_beam_centre=100,200',
        'output.datablock=mosflm_beam_centre.json'
    ] + image_files)
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists("mosflm_beam_centre.json")
    from dxtbx.serialize import load
    datablock = load.datablock("mosflm_beam_centre.json")[0]
    imgset = datablock.extract_imagesets()[0]
    beam_centre = imgset.get_detector()[0].get_beam_centre(
        imgset.get_beam().get_s0())
    # input 100,200 is expected back as (200, 100): the two conventions
    # swap the axis order
    assert beam_centre == pytest.approx((200, 100))

    # provide an alternative datablock.json to get geometry from
    result = run_process([
        'dials.import', 'reference_geometry=mosflm_beam_centre.json',
        'output.datablock=mosflm_beam_centre2.json'
    ] + image_files)
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists("mosflm_beam_centre2.json")
    datablock = load.datablock("mosflm_beam_centre2.json")[0]
    imgset = datablock.extract_imagesets()[0]
    beam_centre = imgset.get_detector()[0].get_beam_centre(
        imgset.get_beam().get_s0())
    assert beam_centre == pytest.approx((200, 100))
def test_generate_mask_with_untrusted_rectangle(input_filename):
    # dials.generate_mask with an untrusted rectangle should write both the
    # mask pickle and a datablock that references it by absolute path.
    result = procrunner.run_process([
        'dials.generate_mask', input_filename, 'output.mask=mask2.pickle',
        'output.datablock=masked_datablock.json',
        'untrusted.rectangle=100,200,100,200'
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists("mask2.pickle")
    assert os.path.exists("masked_datablock.json")
    from dxtbx.serialize import load
    datablocks = load.datablock("masked_datablock.json")
    imageset = datablocks[0].extract_imagesets()[0]
    # The recorded mask path must be absolute, rooted at the cwd the
    # command ran in
    assert imageset.external_lookup.mask.filename == os.path.join(
        os.path.abspath(os.getcwd()), 'mask2.pickle')
def run(self):
    ''' Parse the options, configure logging, and load any reference
    geometry (experiment-list format preferred, datablock as fallback).

    Fixes: `diff_phil is not ''` used an identity (not equality)
    comparison with a string literal, which only behaved correctly by
    CPython interning accident; and the unused exception binding in the
    Python-2-only `except Exception, e` form has been removed.
    '''
    from dxtbx.datablock import DataBlockFactory
    from dxtbx.datablock import DataBlockTemplateImporter
    from dials.util.options import flatten_datablocks
    from dials.util import log
    from logging import info, debug
    import cPickle as pickle
    from libtbx.utils import Sorry

    # Parse the command line arguments
    params, options = self.parser.parse_args(show_diff_phil=False)
    datablocks = flatten_datablocks(params.input.datablock)

    # Configure logging
    log.config(
        params.verbosity,
        info=params.output.log,
        debug=params.output.debug_log)
    from dials.util.version import dials_version
    info(dials_version())

    # Log the diff phil
    diff_phil = self.parser.diff_phil.as_str()
    if diff_phil != '':
        info('The following parameters have been modified:\n')
        info(diff_phil)

    # Load reference geometry
    reference_detector = None
    reference_beam = None
    if params.input.reference_geometry is not None:
        from dxtbx.serialize import load
        try:
            experiments = load.experiment_list(
                params.input.reference_geometry, check_format=False)
            assert len(experiments.detectors()) == 1
            assert len(experiments.beams()) == 1
            reference_detector = experiments.detectors()[0]
            reference_beam = experiments.beams()[0]
        except Exception:
            # Not an experiment list - fall back to the datablock format
            datablock = load.datablock(params.input.reference_geometry)
            assert len(datablock) == 1
            imageset = datablock[0].extract_imagesets()[0]
            reference_detector = imageset.get_detector()
            reference_beam = imageset.get_beam()
def load_datablock(filename):
    # Load datablocks from `filename` and register every contained imageset
    # in the module-level imageset_cache, keyed by template and then by the
    # first image number of the scan, applying any user-specified reference
    # geometry first.
    from xia2.Schema import imageset_cache, update_with_reference_geometry
    from dxtbx.serialize import load

    datablocks = load.datablock(filename, check_format=False)

    for datablock in datablocks:
        imagesets = datablock.extract_imagesets()
        params = PhilIndex.get_python_object()
        reference_geometry = params.xia2.settings.input.reference_geometry
        if reference_geometry is not None and len(reference_geometry) > 0:
            update_with_reference_geometry(imagesets, reference_geometry)
        for imageset in imagesets:
            template = imageset.get_template()
            if template not in imageset_cache:
                imageset_cache[template] = collections.OrderedDict()
            imageset_cache[template][imageset.get_scan().get_image_range()
                                     [0]] = imageset
def load_reference_geometries(geometry_file_list):
    """Load the reference detector and beam from each geometry file.

    Each file may be either an experiment list or (fallback) a datablock;
    exactly one detector and one beam must be defined in each.

    Returns a list of dicts with keys 'detector', 'beam' and 'file'.

    Fixes: the Python-2-only `except Exception, e` form bound an unused
    `e` (now the version-portable `except Exception:`), and the loop
    variable `file` shadowed the builtin.
    """
    from dxtbx.serialize import load
    reference_components = []
    for geometry_file in geometry_file_list:
        try:
            experiments = load.experiment_list(geometry_file,
                                               check_format=False)
            assert len(experiments.detectors()) == 1
            assert len(experiments.beams()) == 1
            reference_detector = experiments.detectors()[0]
            reference_beam = experiments.beams()[0]
        except Exception:
            # Not an experiment list - fall back to the datablock format
            datablock = load.datablock(geometry_file)
            assert len(datablock) == 1
            imageset = datablock[0].extract_imagesets()[0]
            reference_detector = imageset.get_detector()
            reference_beam = imageset.get_beam()
        reference_components.append({'detector': reference_detector,
                                     'beam': reference_beam,
                                     'file': geometry_file})
def run(file_names):
    # Convert the input (a serialized datablock/experiment-list JSON, or
    # raw image files) to a single sweep and emit XDS input via xds.to_xds.
    if len(file_names) == 1 and file_names[0].endswith('json'):
        from dxtbx.serialize import load
        try:
            datablock = load.datablock(file_names[0])
            assert len(datablock) == 1
            sweep = datablock[0].extract_sweeps()[0]
        except ValueError as e:
            # Datablock parse failed: retry as an experiment list only for
            # the known "wrong serialization format" error, otherwise
            # re-raise
            if str(e) == '"__id__" does not equal "imageset"':
                experiments = load.experiment_list(file_names[0])
                assert len(experiments) == 1
                sweep = experiments[0].imageset
            else:
                raise
    else:
        from dxtbx.imageset import ImageSetFactory
        sweep = ImageSetFactory.new(file_names)[0]
    xsx = xds.to_xds(sweep)
    xsx.XDS_INP()
def test_with_mask(dials_regression, tmpdir):
    """dials.import given mask=... must record that mask file in the
    resulting datablock's external lookup."""
    tmpdir.chdir()
    data_dir = os.path.join(dials_regression, "centroid_test_data")
    images = glob(os.path.join(data_dir, "centroid*.cbf"))
    mask_path = os.path.join(data_dir, "mask.pickle")
    cmd = ['dials.import'] + images
    cmd += ['mask=' + mask_path, 'output.datablock=datablock_with_mask.json']
    result = run_process(cmd)
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists("datablock_with_mask.json")
    from dxtbx.serialize import load
    block = load.datablock("datablock_with_mask.json")[0]
    imageset = block.extract_imagesets()[0]
    assert imageset.external_lookup.mask.filename == mask_path
def tst_with_mask(self): from glob import glob import os from libtbx import easy_run from dxtbx.serialize import load # Find the image files image_files = glob(os.path.join(self.path, "centroid*.cbf")) image_files = ' '.join(image_files) mask_filename = os.path.join(self.path, "mask.pickle") # provide mosflm beam centre to dials.import cmd = 'dials.import %s mask=%s output.datablock=datablock_with_mask.json' % (image_files, mask_filename) easy_run.fully_buffered(cmd) assert os.path.exists("datablock_with_mask.json") datablock = load.datablock("datablock_with_mask.json")[0] imgset = datablock.extract_imagesets()[0] assert imgset.external_lookup.mask.filename == mask_filename print 'OK'
def tst_with_mask(self):
    """Check dials.import records an externally supplied mask file in the
    output datablock."""
    from glob import glob
    import os
    from libtbx import easy_run
    from dxtbx.serialize import load

    # Find the image files
    image_files = glob(os.path.join(self.path, "centroid*.cbf"))
    image_files = ' '.join(image_files)
    mask_filename = os.path.join(self.path, "mask.pickle")

    # provide a mask file to dials.import
    cmd = 'dials.import %s mask=%s output.datablock=datablock_with_mask.json' % (image_files, mask_filename)
    easy_run.fully_buffered(cmd).raise_if_errors()
    assert os.path.exists("datablock_with_mask.json")
    datablock = load.datablock("datablock_with_mask.json")[0]
    imgset = datablock.extract_imagesets()[0]
    assert imgset.external_lookup.mask.filename == mask_filename
    print 'OK'
def test_multiple_sweep_import_suceeds_with_allow_parameter(
        dials_regression, tmpdir):
    """allow_multiple_sweeps=True lets a gapped image series import as two sweeps."""
    tmpdir.chdir()

    # Sort the images, then drop one from the middle so the frame numbering is
    # discontinuous and dials.import is forced to build two sweeps.
    frames = sorted(
        glob(os.path.join(dials_regression, "centroid_test_data",
                          "centroid*.cbf")))
    del frames[4]  # Delete filename to force two sweeps

    outcome = run_process(
        ['dials.import'] + frames +
        ['output.datablock=datablock_multiple_sweeps.json',
         'allow_multiple_sweeps=True'])
    assert outcome['exitcode'] == 0
    assert outcome['stderr'] == ''
    assert os.path.exists("datablock_multiple_sweeps.json")

    # The resulting datablock should contain exactly two imagesets (sweeps).
    from dxtbx.serialize import load
    first_block = load.datablock("datablock_multiple_sweeps.json")[0]
    sweeps = first_block.extract_imagesets()
    assert len(sweeps) == 2
def run(args):
    """Parse command-line options and load a reference detector geometry.

    Reads a datablock plus a ``reference=`` file; the reference may be either
    an experiment list or a datablock (tried in that order).

    NOTE(review): the visible body ends after loading the reference detector;
    the remainder of this routine (if any) lies outside this chunk.
    """
    from dials.util.options import OptionParser
    from dials.util.options import flatten_datablocks
    import libtbx.load_env

    usage = "%s [options] datablock.json reference=reference_datablock.json" %(
        libtbx.env.dispatcher_name)

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_datablocks=True,
        check_format=False,
        epilog=help_message)

    params, options = parser.parse_args(show_diff_phil=True)
    datablocks = flatten_datablocks(params.input.datablock)

    # Nothing to do without an input datablock.
    if len(datablocks) == 0:
        parser.print_help()
        exit()

    # Load reference geometry
    reference_detector = None
    if params.input.reference is not None:
        from dxtbx.serialize import load
        try:
            # First assume the reference file is an experiment list.
            reference_experiments = load.experiment_list(
                params.input.reference, check_format=False)
            assert len(reference_experiments.detectors()) == 1
            reference_detector = reference_experiments.detectors()[0]
        except Exception, e:
            # Fall back to reading the reference file as a datablock.
            reference_datablocks = load.datablock(params.input.reference)
            assert len(reference_datablocks) == 1
            imageset = reference_datablocks[0].extract_imagesets()[0]
            reference_detector = imageset.get_detector()
def load_reference_geometry(self, params):
    '''Load a reference geometry (detector and beam) from a file.

    The file named by ``params.input.reference_geometry`` is first read as an
    experiment list; if that fails it is read as a datablock instead. Exactly
    one detector and one beam are expected either way.

    NOTE(review): ``namedtuple`` is imported but unused in the visible body and
    there is no return statement — this block may be truncated; presumably the
    full routine packages and returns (reference_detector, reference_beam).
    '''
    from collections import namedtuple

    # Load reference geometry
    reference_detector = None
    reference_beam = None
    if params.input.reference_geometry is not None:
        from dxtbx.serialize import load
        try:
            # Preferred format: an experiment list.
            experiments = load.experiment_list(
                params.input.reference_geometry, check_format=False)
            assert len(experiments.detectors()) == 1
            assert len(experiments.beams()) == 1
            reference_detector = experiments.detectors()[0]
            reference_beam = experiments.beams()[0]
        except Exception, e:
            # Fallback format: a datablock; take models from its first imageset.
            datablock = load.datablock(params.input.reference_geometry)
            assert len(datablock) == 1
            imageset = datablock[0].extract_imagesets()[0]
            reference_detector = imageset.get_detector()
            reference_beam = imageset.get_beam()
def run():
    """Regression test: old- and new-style goniometer models of the same phi
    scan must describe the same geometry and index identically.

    Loads two datablocks (old: axis folded into the fixed rotation; new:
    explicit datum axis plus setting rotation), checks the goniometer matrices
    against reference values, then indexes with each and compares the results.
    """
    have_dials_regression = libtbx.env.has_module("dials_regression")
    if not have_dials_regression:
        print "Skipped: dials_regression not available"
        return
    dials_regression = libtbx.env.find_in_repositories(
        relative_path="dials_regression",
        test=os.path.isdir)

    from dials.test.algorithms.indexing.tst_index import run_one_indexing

    # Reference answers for indexing either datablock.
    expected_unit_cell = uctbx.unit_cell(
        (11.624, 13.550, 30.103, 89.964, 93.721, 90.132))
    expected_rmsds = (0.039, 0.035, 0.002)

    datablock_old = os.path.join(
        dials_regression, "indexing_test_data/phi_scan/datablock_old.json")
    datablock_new = os.path.join(
        dials_regression, "indexing_test_data/phi_scan/datablock.json")
    strong_pickle = os.path.join(
        dials_regression, "indexing_test_data/phi_scan/strong.pickle")

    from dxtbx.serialize import load
    imageset_old = load.datablock(
        datablock_old, check_format=False)[0].extract_imagesets()[0]
    imageset_new = load.datablock(
        datablock_new, check_format=False)[0].extract_imagesets()[0]

    gonio_old = imageset_old.get_goniometer()
    gonio_new = imageset_new.get_goniometer()

    # The old model carries the whole orientation in the fixed rotation with an
    # identity setting rotation; check against reference values.
    assert approx_equal(
        gonio_old.get_rotation_axis(),
        (0.7497646259807715, -0.5517923303436749, 0.36520984351713554))
    assert approx_equal(
        gonio_old.get_setting_rotation(),
        (1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0))
    assert approx_equal(
        gonio_old.get_fixed_rotation(),
        (0.7497646259807748, -0.20997265900532208, -0.6275065641872948,
         -0.5517923303436731, 0.3250014637526764, -0.7680490041218182,
         0.3652098435171313, 0.9221092836691605, 0.12781329809272568))

    # The new model must expose the same effective axis, but via a datum axis
    # of (1,0,0) rotated by the setting rotation, with identity fixed rotation.
    assert approx_equal(
        gonio_new.get_rotation_axis(),
        gonio_old.get_rotation_axis())
    assert approx_equal(gonio_new.get_rotation_axis_datum(), (1,0,0))
    assert approx_equal(
        gonio_new.get_setting_rotation(),
        (0.7497646259807705, -0.20997265900532142, -0.6275065641873,
         -0.5517923303436786, 0.3250014637526763, -0.768049004121814,
         0.3652098435171315, 0.9221092836691607, 0.12781329809272335))
    assert approx_equal(
        gonio_new.get_fixed_rotation(),
        (1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0))

    # Index using each datablock in turn; the outcomes should be identical.
    result_old = run_one_indexing(
        pickle_path=strong_pickle,
        sweep_path=datablock_old,
        extra_args=[],
        expected_unit_cell=expected_unit_cell,
        expected_rmsds=expected_rmsds,
        expected_hall_symbol=' P 1',
    )
    result_new = run_one_indexing(
        pickle_path=strong_pickle,
        sweep_path=datablock_new,
        extra_args=[],
        expected_unit_cell=expected_unit_cell,
        expected_rmsds=expected_rmsds,
        expected_hall_symbol=' P 1',
    )
    assert approx_equal(result_old.rmsds, result_new.rmsds)
    assert approx_equal(
        result_old.crystal_model.get_unit_cell().parameters(),
        result_new.crystal_model.get_unit_cell().parameters())
def exercise_2():
    """End-to-end beam-centre discovery test on the i04 bag-training data.

    Imports the images with a deliberately offset beam centre, finds spots on
    a subset of the scan, runs dials.discover_better_experimental_model, and
    finally checks the optimized datablock still indexes correctly.
    """
    from dials.test.algorithms.indexing.tst_index import run_one_indexing
    if not have_xia2_regression:
        print "Skipping exercise_2(): xia2_regression not available."
        return

    curdir = os.path.abspath(os.curdir)
    print curdir

    data_dir = os.path.join(xia2_regression, "test_data", "i04_bag_training")
    import glob
    g = glob.glob(os.path.join(data_dir, "*.cbf"))
    if len(g) == 0:
        print "Skipping exercise_2(): xia2_regression files not downloaded."
        print "Run xia2_regression.fetch_test_data first."
        return

    # beam centre from image headers: 205.28,210.76 mm
    # Import with a deliberately wrong-ish beam centre so the search has
    # something to correct.
    args = ["dials.import", "mosflm_beam_centre=207,212"] + g
    command = " ".join(args)
    # print command
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    assert os.path.exists("datablock.json")

    # spot-finding, just need a subset of the data
    args = ["dials.find_spots", "datablock.json",
            "scan_range=1,10", "scan_range=531,540"]
    command = " ".join(args)
    print command
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    assert os.path.exists("strong.pickle")

    # actually run the beam centre search
    args = ["dials.discover_better_experimental_model", "datablock.json",
            "strong.pickle"]
    command = " ".join(args)
    print command
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    result.show_stdout()
    assert os.path.exists("optimized_datablock.json")

    # look at the results: report the detector origin shift produced by the search
    from dxtbx.serialize import load
    datablocks = load.datablock("datablock.json", check_format=False)
    original_imageset = datablocks[0].extract_imagesets()[0]
    optimized_datablock = load.datablock("optimized_datablock.json",
                                         check_format=False)
    detector_1 = original_imageset.get_detector()
    detector_2 = optimized_datablock[0].unique_detectors()[0]
    shift = matrix.col(detector_1[0].get_origin()) - matrix.col(detector_2[0].get_origin())
    print shift

    # check we can actually index the resulting optimized datablock
    from cctbx import uctbx
    expected_unit_cell = uctbx.unit_cell((57.780, 57.800, 150.017, 89.991, 89.990, 90.007))
    expected_rmsds = (0.06, 0.05, 0.001)
    expected_hall_symbol = " P 1"
    result = run_one_indexing(
        os.path.join(curdir, "strong.pickle"),
        os.path.join(curdir, "optimized_datablock.json"),
        [],
        expected_unit_cell,
        expected_rmsds,
        expected_hall_symbol,
    )
def run(space_group_info):
    """Compare local vs global indexing assignment for a synthetic crystal.

    Builds a random crystal model in the given space group, predicts
    reflections on a short sweep, then checks that local indexing assigns
    Miller indices at least as well as global indexing when the basis vectors
    are perturbed (scaled and rotated).
    """
    datablock_json = os.path.join(dials_regression, "indexing_test_data",
                                  "i04_weak_data", "datablock_orig.json")
    datablock = load.datablock(datablock_json, check_format=False)[0]
    sweep = datablock.extract_imagesets()[0]

    # Shorten the sweep to 20 images to keep the test fast.
    sweep._indices = sweep._indices[:20]
    sweep.set_scan(sweep.get_scan()[:20])

    import random
    space_group = space_group_info.group()
    unit_cell = space_group_info.any_compatible_unit_cell(
        volume=random.uniform(1e4, 1e6))

    crystal_symmetry = crystal.symmetry(unit_cell=unit_cell,
                                        space_group=space_group)
    crystal_symmetry.show_summary()

    # the reciprocal matrix
    B = matrix.sqr(unit_cell.fractionalization_matrix()).transpose()
    U = random_rotation()
    A = U * B

    direct_matrix = A.inverse()
    cryst_model = Crystal(direct_matrix[0:3], direct_matrix[3:6],
                          direct_matrix[6:9], space_group=space_group)
    experiment = Experiment(imageset=sweep,
                            beam=sweep.get_beam(),
                            detector=sweep.get_detector(),
                            goniometer=sweep.get_goniometer(),
                            scan=sweep.get_scan(),
                            crystal=cryst_model)

    # Predict reflections and keep a random 30% subset as the "observations".
    predicted_reflections = flex.reflection_table.from_predictions(experiment)
    use_fraction = 0.3
    use_sel = flex.random_selection(
        len(predicted_reflections),
        int(use_fraction * len(predicted_reflections)))
    predicted_reflections = predicted_reflections.select(use_sel)
    miller_indices = predicted_reflections['miller_index']
    miller_set = miller.set(crystal_symmetry, miller_indices,
                            anomalous_flag=True)

    # Use the predicted positions as error-free observed centroids.
    predicted_reflections['xyzobs.mm.value'] = predicted_reflections[
        'xyzcal.mm']
    predicted_reflections['id'] = flex.int(len(predicted_reflections), 0)
    from dials.algorithms.indexing.indexer import indexer_base
    indexer_base.map_centroids_to_reciprocal_space(predicted_reflections,
                                                   sweep.get_detector(),
                                                   sweep.get_beam(),
                                                   sweep.get_goniometer())

    # check that local and global indexing worked equally well in absence of errors
    result = compare_global_local(experiment, predicted_reflections,
                                  miller_indices)
    assert result.misindexed_local == 0
    assert result.misindexed_global == 0

    # Inflate all three basis vectors by 2% to simulate a cell-length error.
    a, b, c = map(matrix.col, cryst_model.get_real_space_vectors())
    relative_error = 0.02
    a *= (1 + relative_error)
    b *= (1 + relative_error)
    c *= (1 + relative_error)

    cryst_model2 = Crystal(a, b, c, space_group=space_group)
    experiment.crystal = cryst_model2

    result = compare_global_local(experiment, predicted_reflections,
                                  miller_indices)

    # check that the local indexing did a better job given the errors in the basis vectors
    #assert result.misindexed_local < result.misindexed_global
    assert result.misindexed_local == 0
    assert result.correct_local > result.correct_global
    # usually the number misindexed is much smaller than this
    assert result.misindexed_local < (0.001 * len(result.reflections_local))

    # the reciprocal matrix
    # Now perturb the orientation instead: apply a small random rotation.
    A = matrix.sqr(cryst_model.get_A())
    A = random_rotation(angle_max=0.03) * A

    direct_matrix = A.inverse()
    cryst_model2 = Crystal(direct_matrix[0:3], direct_matrix[3:6],
                           direct_matrix[6:9], space_group=space_group)
    experiment.crystal = cryst_model2

    result = compare_global_local(experiment, predicted_reflections,
                                  miller_indices)

    # check that the local indexing did a better job given the errors in the basis vectors
    assert result.misindexed_local <= result.misindexed_global, (
        result.misindexed_local, result.misindexed_global)
    assert result.misindexed_local < 0.01 * result.correct_local
    assert result.correct_local > result.correct_global
    # usually the number misindexed is much smaller than this
    assert result.misindexed_local < (0.001 * len(result.reflections_local))
def run(space_group_info):
    """Compare local vs global indexing assignment for a synthetic crystal.

    Older variant using the ``crystal_model`` API (cf. the ``Crystal`` version
    elsewhere in this file). Builds a random crystal model, predicts
    reflections on a short sweep, then checks local indexing beats global
    indexing when the basis vectors are perturbed.
    """
    datablock_json = os.path.join(
        dials_regression, "indexing_test_data", "i04_weak_data",
        "datablock_orig.json")
    datablock = load.datablock(datablock_json, check_format=False)[0]
    sweep = datablock.extract_imagesets()[0]

    # Shorten the sweep to 20 images to keep the test fast.
    sweep._indices = sweep._indices[:20]
    sweep.set_scan(sweep.get_scan()[:20])

    import random
    space_group = space_group_info.group()
    unit_cell = space_group_info.any_compatible_unit_cell(volume=random.uniform(1e4,1e6))

    crystal_symmetry = crystal.symmetry(unit_cell=unit_cell,
                                        space_group=space_group)
    crystal_symmetry.show_summary()

    # the reciprocal matrix
    B = matrix.sqr(unit_cell.fractionalization_matrix()).transpose()
    U = random_rotation()
    A = U * B

    direct_matrix = A.inverse()
    cryst_model = crystal_model(direct_matrix[0:3],
                                direct_matrix[3:6],
                                direct_matrix[6:9],
                                space_group=space_group)
    experiment = Experiment(imageset=sweep,
                            beam=sweep.get_beam(),
                            detector=sweep.get_detector(),
                            goniometer=sweep.get_goniometer(),
                            scan=sweep.get_scan(),
                            crystal=cryst_model)

    # Predict reflections and keep a random 30% subset as the "observations".
    predicted_reflections = flex.reflection_table.from_predictions(
        experiment)
    use_fraction = 0.3
    use_sel = flex.random_selection(
        len(predicted_reflections),
        int(use_fraction*len(predicted_reflections)))
    predicted_reflections = predicted_reflections.select(use_sel)
    miller_indices = predicted_reflections['miller_index']
    miller_set = miller.set(
        crystal_symmetry, miller_indices, anomalous_flag=True)

    # Use the predicted positions as error-free observed centroids.
    predicted_reflections['xyzobs.mm.value'] = predicted_reflections['xyzcal.mm']
    predicted_reflections['id'] = flex.int(len(predicted_reflections), 0)
    from dials.algorithms.indexing.indexer import indexer_base
    indexer_base.map_centroids_to_reciprocal_space(
        predicted_reflections, sweep.get_detector(), sweep.get_beam(),
        sweep.get_goniometer())

    # check that local and global indexing worked equally well in absence of errors
    result = compare_global_local(experiment, predicted_reflections,
                                  miller_indices)
    assert result.misindexed_local == 0
    assert result.misindexed_global == 0

    # Inflate all three basis vectors by 2% to simulate a cell-length error.
    a, b, c = cryst_model.get_real_space_vectors()
    relative_error = 0.02
    a *= (1+relative_error)
    b *= (1+relative_error)
    c *= (1+relative_error)

    cryst_model2 = crystal_model(a, b, c, space_group=space_group)
    experiment.crystal = cryst_model2

    result = compare_global_local(experiment, predicted_reflections,
                                  miller_indices)

    # check that the local indexing did a better job given the errors in the basis vectors
    #assert result.misindexed_local < result.misindexed_global
    assert result.misindexed_local == 0
    assert result.correct_local > result.correct_global
    # usually the number misindexed is much smaller than this
    assert result.misindexed_local < (0.001 * len(result.reflections_local))

    # the reciprocal matrix
    # Now perturb the orientation instead: apply a small random rotation.
    A = cryst_model.get_A()
    A = random_rotation(angle_max=0.03) * A

    direct_matrix = A.inverse()
    cryst_model2 = crystal_model(direct_matrix[0:3],
                                 direct_matrix[3:6],
                                 direct_matrix[6:9],
                                 space_group=space_group)
    experiment.crystal = cryst_model2

    result = compare_global_local(experiment, predicted_reflections,
                                  miller_indices)

    # check that the local indexing did a better job given the errors in the basis vectors
    assert result.misindexed_local <= result.misindexed_global, (
        result.misindexed_local, result.misindexed_global)
    assert result.misindexed_local < 0.01 * result.correct_local
    assert result.correct_local > result.correct_global
    # usually the number misindexed is much smaller than this
    assert result.misindexed_local < (0.001 * len(result.reflections_local))
def tst_override_geometry(self): from glob import glob import os from libtbx import easy_run from dxtbx.serialize import load # Find the image files image_files = glob(os.path.join(self.path, "centroid*.cbf")) image_files = ' '.join(image_files) # Write a geometry phil file with open("geometry.phil", "w") as outfile: outfile.write( ''' geometry { beam { wavelength = 2 direction = (-1,0,0) } detector { panel { name = "New panel" type = "New type" pixel_size = 10,20 image_size = 30,40 trusted_range = 50,60 thickness = 70 material = "New material" fast_axis = -1,0,0 slow_axis = 0,-1,0 origin = 100,100,100 } } goniometer { rotation_axis = 0,0,-1 fixed_rotation = 0,1,2,3,4,5,6,7,8 setting_rotation = 8,7,6,5,4,3,2,1,0 } scan { image_range = 1,4 oscillation = 1,2 } } ''') # provide mosflm beam centre to dials.import cmd = 'dials.import %s geometry.phil output.datablock=override_geometry.json' %image_files easy_run.fully_buffered(cmd) assert os.path.exists("override_geometry.json") datablock = load.datablock("override_geometry.json")[0] imgset = datablock.extract_imagesets()[0] beam = imgset.get_beam() detector = imgset.get_detector() goniometer = imgset.get_goniometer() scan = imgset.get_scan() assert beam.get_wavelength() == 2 assert beam.get_direction() == (-1,0,0) assert detector[0].get_name() == "New panel" assert detector[0].get_type() == "New type" assert detector[0].get_pixel_size() == (10,20) assert detector[0].get_image_size() == (30,40) assert detector[0].get_trusted_range() == (50,60) assert detector[0].get_thickness() == 70 assert detector[0].get_material() == "New material" assert detector[0].get_fast_axis() == (-1,0,0) assert detector[0].get_slow_axis() == (0,-1,0) assert detector[0].get_origin() == (100,100,100) assert goniometer.get_rotation_axis() == (0,0,-1) assert goniometer.get_fixed_rotation() == (0,1,2,3,4,5,6,7,8) assert goniometer.get_setting_rotation() == (8,7,6,5,4,3,2,1,0) assert scan.get_image_range() == (1,4) assert scan.get_oscillation() == 
(1,2) print 'OK'
def run(args):
    """Copy panel-level metrology from a reference detector onto a datablock.

    Loads an input datablock and a ``reference=`` geometry (experiment list or
    datablock), checks the two detector hierarchies are compatible, copies each
    reference panel's local frame onto the corresponding input panel, and
    writes the corrected datablock to ``params.output.datablock``.
    """
    from dials.util.options import OptionParser
    from dials.util.options import flatten_datablocks
    import libtbx.load_env

    usage = "%s [options] datablock.json reference=reference_datablock.json" % (
        libtbx.env.dispatcher_name
    )

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_datablocks=True,
        check_format=False,
        epilog=help_message,
    )

    params, options = parser.parse_args(show_diff_phil=True)
    datablocks = flatten_datablocks(params.input.datablock)

    # Nothing to do without an input datablock.
    if len(datablocks) == 0:
        parser.print_help()
        exit()

    # Load reference geometry
    reference_detector = None
    if params.input.reference is not None:
        from dxtbx.serialize import load
        try:
            # First assume the reference file is an experiment list.
            reference_experiments = load.experiment_list(
                params.input.reference, check_format=False
            )
            assert len(reference_experiments.detectors()) == 1
            reference_detector = reference_experiments.detectors()[0]
        except Exception as e:
            # Fall back to reading the reference file as a datablock.
            reference_datablocks = load.datablock(params.input.reference)
            assert len(reference_datablocks) == 1
            imageset = reference_datablocks[0].extract_imagesets()[0]
            reference_detector = imageset.get_detector()

    assert len(datablocks) == 1
    imageset = datablocks[0].extract_imagesets()[0]
    detector = imageset.get_detector()

    # The two hierarchies must have matching panel counts, and their overall
    # distances must agree to within the configured tolerance.
    h = detector.hierarchy()
    href = reference_detector.hierarchy()
    assert len(h) == len(href)
    assert (
        abs(h.get_directed_distance() - href.get_directed_distance())
        < params.max_delta_distance
    )

    # Copy each reference panel's local frame onto the corresponding panel.
    for panel, panel_ref in zip(h.children(), href.children()):
        panel.set_local_frame(
            panel_ref.get_local_fast_axis(),
            panel_ref.get_local_slow_axis(),
            panel_ref.get_local_origin(),
        )

    print("Writing metrology-corrected datablock to %s" % params.output.datablock)
    from dxtbx.serialize import dump
    dump.datablock(datablocks, params.output.datablock)

    return
def tst_override_geometry(self):
    """Check that a geometry phil file overrides every model during import.

    Newer variant of this test: the goniometer override uses ``axes`` (checked
    via ``get_rotation_axis_datum``) and the panel material is "Si". Writes a
    phil file replacing beam, detector, goniometer and scan, imports the
    centroid images with it, and verifies every override round-trips.
    """
    from glob import glob
    import os
    from libtbx import easy_run
    from dxtbx.serialize import load

    # Find the image files
    image_files = glob(os.path.join(self.path, "centroid*.cbf"))
    image_files = ' '.join(image_files)

    # Write a geometry phil file
    with open("geometry.phil", "w") as outfile:
        outfile.write(
            '''
geometry {
  beam {
    wavelength = 2
    direction = (-1,0,0)
  }
  detector {
    panel {
      name = "New panel"
      type = "New type"
      pixel_size = 10,20
      image_size = 30,40
      trusted_range = 50,60
      thickness = 70
      material = "Si"
      fast_axis = -1,0,0
      slow_axis = 0,-1,0
      origin = 100,100,100
    }
  }
  goniometer {
    axes = 0,0,-1
    fixed_rotation = 0,1,2,3,4,5,6,7,8
    setting_rotation = 8,7,6,5,4,3,2,1,0
  }
  scan {
    image_range = 1,4
    oscillation = 1,2
  }
}
''')

    # provide the geometry overrides to dials.import; fail loudly on error
    cmd = 'dials.import %s geometry.phil output.datablock=override_geometry.json' %image_files
    easy_run.fully_buffered(cmd).raise_if_errors()
    assert os.path.exists("override_geometry.json")

    datablock = load.datablock("override_geometry.json")[0]
    imgset = datablock.extract_imagesets()[0]
    beam = imgset.get_beam()
    detector = imgset.get_detector()
    goniometer = imgset.get_goniometer()
    scan = imgset.get_scan()

    # Every overridden value must round-trip through import unchanged.
    assert beam.get_wavelength() == 2
    assert beam.get_direction() == (-1,0,0)
    assert detector[0].get_name() == "New panel"
    assert detector[0].get_type() == "New type"
    assert detector[0].get_pixel_size() == (10,20)
    assert detector[0].get_image_size() == (30,40)
    assert detector[0].get_trusted_range() == (50,60)
    assert detector[0].get_thickness() == 70
    assert detector[0].get_material() == "Si"
    assert detector[0].get_fast_axis() == (-1,0,0)
    assert detector[0].get_slow_axis() == (0,-1,0)
    assert detector[0].get_origin() == (100,100,100)
    assert goniometer.get_rotation_axis_datum() == (0,0,-1)
    assert goniometer.get_fixed_rotation() == (0,1,2,3,4,5,6,7,8)
    assert goniometer.get_setting_rotation() == (8,7,6,5,4,3,2,1,0)
    assert scan.get_image_range() == (1,4)
    assert scan.get_oscillation() == (1,2)

    print 'OK'
def exercise_2():
    """End-to-end beam-centre discovery test on the i04 bag-training data.

    Variant of this exercise globbing "*.cbf*" (picks up compressed images
    too). Imports with an offset beam centre, finds spots on a subset of the
    scan, runs dials.discover_better_experimental_model, and checks the
    optimized datablock still indexes correctly.
    """
    from dials.test.algorithms.indexing.tst_index import run_one_indexing
    if not have_xia2_regression:
        print "Skipping exercise_2(): xia2_regression not available."
        return

    curdir = os.path.abspath(os.curdir)
    print curdir

    data_dir = os.path.join(xia2_regression, "test_data", "i04_bag_training")
    import glob
    g = glob.glob(os.path.join(data_dir, "*.cbf*"))
    if len(g) == 0:
        print "Skipping exercise_2(): xia2_regression files not downloaded."
        print "Run xia2_regression.fetch_test_data first."
        return

    # beam centre from image headers: 205.28,210.76 mm
    # Import with a deliberately wrong-ish beam centre so the search has
    # something to correct.
    args = ["dials.import", "mosflm_beam_centre=207,212"] + g
    command = " ".join(args)
    #print command
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    assert os.path.exists('datablock.json')

    # spot-finding, just need a subset of the data
    args = [
        "dials.find_spots", "datablock.json",
        "scan_range=1,10", "scan_range=531,540"
    ]
    command = " ".join(args)
    print command
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    assert os.path.exists('strong.pickle')

    # actually run the beam centre search
    args = [
        "dials.discover_better_experimental_model", "datablock.json",
        "strong.pickle"
    ]
    command = " ".join(args)
    print command
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    result.show_stdout()
    assert os.path.exists('optimized_datablock.json')

    # look at the results: report the detector origin shift produced by the search
    from dxtbx.serialize import load
    datablocks = load.datablock("datablock.json", check_format=False)
    original_imageset = datablocks[0].extract_imagesets()[0]
    optimized_datablock = load.datablock('optimized_datablock.json',
                                         check_format=False)
    detector_1 = original_imageset.get_detector()
    detector_2 = optimized_datablock[0].unique_detectors()[0]
    shift = (matrix.col(detector_1[0].get_origin()) -
             matrix.col(detector_2[0].get_origin()))
    print shift

    # check we can actually index the resulting optimized datablock
    from cctbx import uctbx
    expected_unit_cell = uctbx.unit_cell(
        (57.780, 57.800, 150.017, 89.991, 89.990, 90.007))
    expected_rmsds = (0.06, 0.05, 0.001)
    expected_hall_symbol = ' P 1'
    result = run_one_indexing(os.path.join(curdir, 'strong.pickle'),
                              os.path.join(curdir, 'optimized_datablock.json'),
                              [],
                              expected_unit_cell,
                              expected_rmsds,
                              expected_hall_symbol)
def run():
    """Regression test: old- and new-style goniometer models of the same phi
    scan must describe the same geometry, index identically, and produce the
    same refinement gradients.

    Extends the basic goniometer-equivalence check with a comparison of
    analytical prediction-parameter gradients between the two models, and of
    analytical vs finite-difference gradients for each.
    """
    have_dials_regression = libtbx.env.has_module("dials_regression")
    if not have_dials_regression:
        print "Skipped: dials_regression not available"
        return
    dials_regression = libtbx.env.find_in_repositories(
        relative_path="dials_regression", test=os.path.isdir)

    from dials.test.algorithms.indexing.tst_index import run_one_indexing

    # Reference answers for indexing either datablock.
    expected_unit_cell = uctbx.unit_cell(
        (11.624, 13.550, 30.103, 89.964, 93.721, 90.132))
    expected_rmsds = (0.039, 0.035, 0.002)

    datablock_old = os.path.join(
        dials_regression, "indexing_test_data/phi_scan/datablock_old.json")
    datablock_new = os.path.join(dials_regression,
                                 "indexing_test_data/phi_scan/datablock.json")
    strong_pickle = os.path.join(dials_regression,
                                 "indexing_test_data/phi_scan/strong.pickle")

    from dxtbx.serialize import load
    imageset_old = load.datablock(datablock_old,
                                  check_format=False)[0].extract_imagesets()[0]
    imageset_new = load.datablock(datablock_new,
                                  check_format=False)[0].extract_imagesets()[0]

    gonio_old = imageset_old.get_goniometer()
    gonio_new = imageset_new.get_goniometer()

    # Old model: orientation carried in the fixed rotation, identity setting.
    assert approx_equal(
        gonio_old.get_rotation_axis(),
        (0.7497646259807715, -0.5517923303436749, 0.36520984351713554))
    assert approx_equal(gonio_old.get_setting_rotation(),
                        (1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0))
    assert approx_equal(
        gonio_old.get_fixed_rotation(),
        (0.7497646259807748, -0.20997265900532208, -0.6275065641872948,
         -0.5517923303436731, 0.3250014637526764, -0.7680490041218182,
         0.3652098435171313, 0.9221092836691605, 0.12781329809272568))

    # New model: same effective axis via datum (1,0,0) + setting rotation.
    assert approx_equal(gonio_new.get_rotation_axis(),
                        gonio_old.get_rotation_axis())
    assert approx_equal(gonio_new.get_rotation_axis_datum(), (1, 0, 0))
    assert approx_equal(
        gonio_new.get_setting_rotation(),
        (0.7497646259807705, -0.20997265900532142, -0.6275065641873,
         -0.5517923303436786, 0.3250014637526763, -0.768049004121814,
         0.3652098435171315, 0.9221092836691607, 0.12781329809272335))
    assert approx_equal(gonio_new.get_fixed_rotation(),
                        (1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0))

    # Index using each datablock in turn; the outcomes should be identical.
    result_old = run_one_indexing(
        pickle_path=strong_pickle,
        sweep_path=datablock_old,
        extra_args=[],
        expected_unit_cell=expected_unit_cell,
        expected_rmsds=expected_rmsds,
        expected_hall_symbol=' P 1',
    )
    result_new = run_one_indexing(
        pickle_path=strong_pickle,
        sweep_path=datablock_new,
        extra_args=[],
        expected_unit_cell=expected_unit_cell,
        expected_rmsds=expected_rmsds,
        expected_hall_symbol=' P 1',
    )
    assert approx_equal(result_old.rmsds, result_new.rmsds)
    assert approx_equal(result_old.crystal_model.get_unit_cell().parameters(),
                        result_new.crystal_model.get_unit_cell().parameters())

    # Now test refinement gradients are correct
    from dxtbx.model.experiment_list import ExperimentList, Experiment
    old_exps = ExperimentList([
        Experiment(beam=imageset_old.get_beam(),
                   detector=imageset_old.get_detector(),
                   goniometer=gonio_old,
                   scan=imageset_old.get_scan(),
                   crystal=result_old.crystal_model,
                   imageset=None)
    ])
    new_exps = ExperimentList([
        Experiment(beam=imageset_new.get_beam(),
                   detector=imageset_new.get_detector(),
                   goniometer=gonio_new,
                   scan=imageset_new.get_scan(),
                   crystal=result_new.crystal_model,
                   imageset=None)
    ])

    # Build refiners with default phil parameters for each experiment list.
    from libtbx.phil import parse
    from dials.algorithms.refinement.refiner import phil_scope
    params = phil_scope.fetch(source=parse('')).extract()
    from dials.algorithms.refinement.refiner import RefinerFactory
    refiner_old = RefinerFactory.from_parameters_data_experiments(
        params, result_old.indexed_reflections, old_exps, verbosity=0)
    refiner_new = RefinerFactory.from_parameters_data_experiments(
        params, result_new.indexed_reflections, new_exps, verbosity=0)

    # Analytical gradients should be approximately the same in either case
    an_grads_old = refiner_old._pred_param.get_gradients(
        refiner_old.get_matches())
    an_grads_new = refiner_new._pred_param.get_gradients(
        refiner_new.get_matches())
    for g1, g2 in zip(an_grads_old, an_grads_new):
        assert approx_equal(g1["dX_dp"], g2["dX_dp"], eps=1.e-6)
        assert approx_equal(g1["dY_dp"], g2["dY_dp"], eps=1.e-6)
        assert approx_equal(g1["dphi_dp"], g2["dphi_dp"], eps=1.e-6)

    # Analytical gradients should be approximately equal to finite difference
    # gradients in either case
    fd_grads_old = calc_fd_grads(refiner_old)
    for g1, g2 in zip(fd_grads_old, an_grads_old):
        assert approx_equal(g1["dX_dp"], g2["dX_dp"], eps=5.e-6)
        assert approx_equal(g1["dY_dp"], g2["dY_dp"], eps=5.e-6)
        assert approx_equal(g1["dphi_dp"], g2["dphi_dp"], eps=5.e-6)
    fd_grads_new = calc_fd_grads(refiner_new)
    for g1, g2 in zip(fd_grads_new, an_grads_new):
        assert approx_equal(g1["dX_dp"], g2["dX_dp"], eps=5.e-6)
        assert approx_equal(g1["dY_dp"], g2["dY_dp"], eps=5.e-6)
        assert approx_equal(g1["dphi_dp"], g2["dphi_dp"], eps=5.e-6)
def run_once(directory):
    """Collect indexing/integration statistics for one processed sweep directory.

    Reads the datablock, strong spots and (if present) indexed/unindexed spot
    files and experiment list from *directory*, computes d-spacing percentiles
    of the strong spots and per-lattice indexing statistics, and returns them
    bundled in a ``group_args``. Returns None if the required inputs are
    missing or the datablock contains no sweeps.
    """
    from dxtbx.serialize import load
    sweep_dir = os.path.basename(directory)
    print(sweep_dir)

    datablock_name = os.path.join(directory, "datablock.json")
    if not os.path.exists(datablock_name):
        # this is what xia2 calls it:
        datablock_name = os.path.join(directory, "datablock_import.json")
    strong_spots_name = os.path.join(directory, "strong.pickle")
    experiments_name = os.path.join(directory, "experiments.json")
    indexed_spots_name = os.path.join(directory, "indexed.pickle")
    unindexed_spots_name = os.path.join(directory, "unindexed.pickle")

    # Both the datablock and the strong spots are required.
    if not (os.path.exists(datablock_name) and
            os.path.exists(strong_spots_name)):
        return

    datablock = load.datablock(datablock_name)
    assert len(datablock) == 1
    if len(datablock[0].extract_sweeps()) == 0:
        print("Skipping %s" % directory)
        return
    sweep = datablock[0].extract_sweeps()[0]
    template = sweep.get_template()

    strong_spots = easy_pickle.load(strong_spots_name)
    n_strong_spots = len(strong_spots)

    if os.path.exists(experiments_name):
        experiments = load.experiment_list(experiments_name)
        n_indexed_lattices = len(experiments)
    else:
        experiments = None
        n_indexed_lattices = 0

    # Count XDS integration results as integrated lattices.
    g = glob.glob(os.path.join(directory, "xds*", "run_2", "INTEGRATE.HKL"))
    n_integrated_lattices = len(g)

    if os.path.exists(indexed_spots_name):
        indexed_spots = easy_pickle.load(indexed_spots_name)
    else:
        indexed_spots = None
        # NOTE(review): structure reconstructed — when no combined indexed
        # pickle exists, fall back to merging per-lattice indexed_*.pickle
        # files; confirm indentation against the original file.
        g = glob.glob(os.path.join(directory, "indexed_*.pickle"))
        if len(g):
            for path in g:
                if indexed_spots is None:
                    indexed_spots = easy_pickle.load(path)
                else:
                    indexed_spots.extend(easy_pickle.load(path))

    if os.path.exists(unindexed_spots_name):
        unindexed_spots = easy_pickle.load(unindexed_spots_name)
        n_unindexed_spots = len(unindexed_spots)
    else:
        n_unindexed_spots = 0

    # calculate estimated d_min for sweep based on 95th percentile
    from dials.algorithms.indexing import indexer
    detector = sweep.get_detector()
    scan = sweep.get_scan()
    beam = sweep.get_beam()
    goniometer = sweep.get_goniometer()
    if len(strong_spots) == 0:
        d_strong_spots_99th_percentile = 0
        d_strong_spots_95th_percentile = 0
        d_strong_spots_50th_percentile = 0
        n_strong_spots_dmin_4 = 0
    else:
        # Map strong spots to reciprocal space to get their d-spacings.
        spots_mm = indexer.Indexer.map_spots_pixel_to_mm_rad(
            strong_spots, detector, scan)
        indexer.Indexer.map_centroids_to_reciprocal_space(
            spots_mm, detector, beam, goniometer)
        d_spacings = 1 / spots_mm["rlp"].norms()
        perm = flex.sort_permutation(d_spacings, reverse=True)
        d_spacings_sorted = d_spacings.select(perm)
        percentile_99th = int(math.floor(0.99 * len(d_spacings)))
        percentile_95th = int(math.floor(0.95 * len(d_spacings)))
        percentile_50th = int(math.floor(0.5 * len(d_spacings)))
        d_strong_spots_99th_percentile = d_spacings_sorted[percentile_99th]
        d_strong_spots_95th_percentile = d_spacings_sorted[percentile_95th]
        d_strong_spots_50th_percentile = d_spacings_sorted[percentile_50th]
        n_strong_spots_dmin_4 = (d_spacings >= 4).count(True)

    # Per-lattice statistics for each indexed experiment.
    cell_params = flex.sym_mat3_double()
    n_indexed = flex.double()
    d_min_indexed = flex.double()
    rmsds = flex.vec3_double()
    sweep_dir_cryst = flex.std_string()
    if experiments is not None:
        for i, experiment in enumerate(experiments):
            sweep_dir_cryst.append(sweep_dir)
            crystal_model = experiment.crystal
            unit_cell = crystal_model.get_unit_cell()
            space_group = crystal_model.get_space_group()
            crystal_symmetry = crystal.symmetry(unit_cell=unit_cell,
                                                space_group=space_group)
            # Record the cell in the reference setting so cells are comparable.
            cb_op_reference_setting = (
                crystal_symmetry.change_of_basis_op_to_reference_setting())
            crystal_symmetry_reference_setting = crystal_symmetry.change_basis(
                cb_op_reference_setting)
            cell_params.append(
                crystal_symmetry_reference_setting.unit_cell().parameters())
            spots_mm = indexed_spots.select(indexed_spots["id"] == i)
            n_indexed.append(len(spots_mm))
            if len(spots_mm) == 0:
                d_min_indexed.append(0)
            else:
                indexer.Indexer.map_centroids_to_reciprocal_space(
                    spots_mm, detector, beam, goniometer)
                d_spacings = 1 / spots_mm["rlp"].norms()
                perm = flex.sort_permutation(d_spacings, reverse=True)
                d_min_indexed.append(d_spacings[perm[-1]])
            try:
                rmsds.append(get_rmsds_obs_pred(spots_mm, experiment))
            except Exception as e:
                # Best-effort: record a sentinel rmsd and carry on.
                print(e)
                rmsds.append((-1, -1, -1))
                continue

    return group_args(
        sweep_dir=sweep_dir,
        template=template,
        n_strong_spots=n_strong_spots,
        n_strong_spots_dmin_4=n_strong_spots_dmin_4,
        n_unindexed_spots=n_unindexed_spots,
        n_indexed_lattices=n_indexed_lattices,
        n_integrated_lattices=n_integrated_lattices,
        d_strong_spots_50th_percentile=d_strong_spots_50th_percentile,
        d_strong_spots_95th_percentile=d_strong_spots_95th_percentile,
        d_strong_spots_99th_percentile=d_strong_spots_99th_percentile,
        cell_params=cell_params,
        n_indexed=n_indexed,
        d_min_indexed=d_min_indexed,
        rmsds=rmsds,
        sweep_dir_cryst=sweep_dir_cryst,
    )
def run_once(directory): from dxtbx.serialize import load sweep_dir = os.path.basename(directory) print sweep_dir datablock_name = os.path.join(directory, "datablock.json") if not os.path.exists(datablock_name): # this is what xia2 calls it: datablock_name = os.path.join(directory, "datablock_import.json") strong_spots_name = os.path.join(directory, "strong.pickle") experiments_name = os.path.join(directory, "experiments.json") indexed_spots_name = os.path.join(directory, "indexed.pickle") unindexed_spots_name = os.path.join(directory, "unindexed.pickle") if not (os.path.exists(datablock_name) and os.path.exists(strong_spots_name)): return datablock = load.datablock(datablock_name) assert len(datablock) == 1 if len(datablock[0].extract_sweeps()) == 0: print "Skipping %s" %directory return sweep = datablock[0].extract_sweeps()[0] template = sweep.get_template() strong_spots = easy_pickle.load(strong_spots_name) n_strong_spots = len(strong_spots) if os.path.exists(experiments_name): experiments = load.experiment_list(experiments_name) n_indexed_lattices = len(experiments) else: experiments = None n_indexed_lattices = 0 g = glob.glob(os.path.join(directory, "xds*", "run_2", "INTEGRATE.HKL")) n_integrated_lattices = len(g) if os.path.exists(indexed_spots_name): indexed_spots = easy_pickle.load(indexed_spots_name) else: indexed_spots = None g = glob.glob(os.path.join(directory, "indexed_*.pickle")) if len(g): for path in g: if indexed_spots is None: indexed_spots = easy_pickle.load(path) else: indexed_spots.extend(easy_pickle.load(path)) if os.path.exists(unindexed_spots_name): unindexed_spots = easy_pickle.load(unindexed_spots_name) n_unindexed_spots = len(unindexed_spots) else: n_unindexed_spots = 0 # calculate estimated d_min for sweep based on 95th percentile from dials.algorithms.indexing import indexer detector = sweep.get_detector() scan = sweep.get_scan() beam = sweep.get_beam() goniometer = sweep.get_goniometer() if len(strong_spots) == 0: 
d_strong_spots_99th_percentile = 0 d_strong_spots_95th_percentile = 0 d_strong_spots_50th_percentile = 0 n_strong_spots_dmin_4 = 0 else: spots_mm = indexer.indexer_base.map_spots_pixel_to_mm_rad( strong_spots, detector, scan) indexer.indexer_base.map_centroids_to_reciprocal_space( spots_mm, detector, beam, goniometer) d_spacings = 1/spots_mm['rlp'].norms() perm = flex.sort_permutation(d_spacings, reverse=True) d_spacings_sorted = d_spacings.select(perm) percentile_99th = int(math.floor(0.99 * len(d_spacings))) percentile_95th = int(math.floor(0.95 * len(d_spacings))) percentile_50th = int(math.floor(0.5 * len(d_spacings))) d_strong_spots_99th_percentile = d_spacings_sorted[percentile_99th] d_strong_spots_95th_percentile = d_spacings_sorted[percentile_95th] d_strong_spots_50th_percentile = d_spacings_sorted[percentile_50th] n_strong_spots_dmin_4 = (d_spacings >= 4).count(True) cell_params = flex.sym_mat3_double() n_indexed = flex.double() d_min_indexed = flex.double() rmsds = flex.vec3_double() sweep_dir_cryst = flex.std_string() if experiments is not None: for i, experiment in enumerate(experiments): sweep_dir_cryst.append(sweep_dir) crystal_model = experiment.crystal unit_cell = crystal_model.get_unit_cell() space_group = crystal_model.get_space_group() crystal_symmetry = crystal.symmetry(unit_cell=unit_cell, space_group=space_group) cb_op_reference_setting = crystal_symmetry.change_of_basis_op_to_reference_setting() crystal_symmetry_reference_setting = crystal_symmetry.change_basis( cb_op_reference_setting) cell_params.append(crystal_symmetry_reference_setting.unit_cell().parameters()) spots_mm = indexed_spots.select(indexed_spots['id'] == i) n_indexed.append(len(spots_mm)) if len(spots_mm) == 0: d_min_indexed.append(0) else: indexer.indexer_base.map_centroids_to_reciprocal_space( spots_mm, detector, beam, goniometer) d_spacings = 1/spots_mm['rlp'].norms() perm = flex.sort_permutation(d_spacings, reverse=True) d_min_indexed.append(d_spacings[perm[-1]]) try: 
rmsds.append(get_rmsds_obs_pred(spots_mm, experiment)) except Exception, e: print e rmsds.append((-1,-1,-1)) continue