def test_DLS_I03_smargon(dials_data):
    filename = dials_data("image_examples").join(
        "DLS_I03_smargon_0001.cbf.gz").strpath
    assert FormatCBFFullPilatusDLS6MSN126.understand(filename)
    expts = ExperimentListFactory.from_filenames(
        [filename], format_kwargs={"dynamic_shadowing": True})
    assert len(expts) == 1
    imageset = expts[0].imageset
    assert imageset.get_format_class() == FormatCBFFullPilatusDLS6MSN126
    gonio = imageset.get_goniometer()
    assert list(gonio.get_angles()) == pytest.approx([45.0, 45.0, 45.0])
    assert list(gonio.get_axes().as_double()) == pytest.approx(
        [1.0, 0.0, 0.0, 0.0, 0.0, -1.0, 1.0, 0.0, 0.0])
    assert list(gonio.get_names()) == ["GON_PHI", "GON_CHI", "GON_OMEGA"]
    assert imageset.has_dynamic_mask()
    masker = imageset.masker()
    assert isinstance(masker, SmarGonShadowMasker)
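    # No pixels are shadowed at image 0; a fixed count of pixels is shadowed by image 100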
    assert masker.get_mask(imageset.get_detector(), 0)[0].count(False) == 0
    assert masker.get_mask(imageset.get_detector(),
                           100)[0].count(False) == 261588
Example #2
def get_template_info(exp_path, img_num):
    try:
        experiments = ExperimentListFactory.from_json_file(exp_path)

        max_img_num = 0
        for single_sweep in experiments.imagesets():
            max_img_num += len(single_sweep.indices())

        max_img_num -= 1
        print("max_img_num =", max_img_num)
        if img_num < 0:
            new_img_num = 0
            print("time to correct image number to ", new_img_num)

        elif img_num > max_img_num:
            new_img_num = max_img_num
            print("time to reduce image number to ", new_img_num)

        else:
            new_img_num = img_num

        on_sweep_img_num, n_sweep = get_correct_img_num_n_sweep_num(
            experiments, new_img_num)
        my_sweep = experiments.imagesets()[n_sweep]

        str_json = my_sweep.get_template()
        print("getting template for image num:", new_img_num)
        img_path = my_sweep.get_path(on_sweep_img_num)
        print("\n get_path =", img_path, "\n")

        data_xy_flex = my_sweep.get_raw_data(0)[0].as_double()
        img_width, img_height = data_xy_flex.all()[0:2]
        return [str_json, img_width, img_height, img_path, new_img_num]

    except IndexError:
        print(" *** IndexError in template ***")
        return

    except OverflowError:
        print(" *** OverflowError in template ***")
        return
def test_soleil_Proxima2A_zenodo_1443110_data03():
    # https://zenodo.org/record/1221344#.XEHr_5ynx2Q
    master_h5 = "/dls/mx-scratch/rjgildea/zenodo/1221344/200Hz/3_5_200Hz_1_master.h5"

    if not os.access(master_h5, os.R_OK):
        pytest.skip("Test images not available")

    assert FormatHDF5EigerNearlyNexus.understand(master_h5)

    expts = ExperimentListFactory.from_filenames([master_h5])
    imageset = expts[0].imageset
    assert imageset.get_format_class() == FormatHDF5EigerNearlyNexus

    detector = imageset.get_detector()
    gonio = imageset.get_goniometer()
    scan = imageset.get_scan()
    beam = imageset.get_beam()

    panel = detector[0]
    assert panel.get_pixel_size() == pytest.approx((0.075, 0.075))
    assert panel.get_image_size() == (3110, 3269)
    assert panel.get_trusted_range() == (-1, 12440)
    assert panel.get_fast_axis() == (1, 0, 0)
    assert panel.get_slow_axis() == (0, -1, 0)
    assert panel.get_thickness() == pytest.approx(0.45)
    assert panel.get_mu() == pytest.approx(3.96763)
    assert panel.get_material() == "Si"
    assert panel.get_origin() == pytest.approx((-120.556, 118.982, -134.255),
                                               abs=1e-3)
    assert panel.get_distance() == pytest.approx(134.255)

    assert isinstance(gonio, Goniometer)
    assert gonio.get_rotation_axis() == (1, 0, 0)
    assert gonio.get_fixed_rotation() == (1, 0, 0, 0, 1, 0, 0, 0, 1)
    assert gonio.get_setting_rotation() == (1, 0, 0, 0, 1, 0, 0, 0, 1)

    assert scan.get_oscillation() == pytest.approx((0, 0.5))
    assert scan.get_image_range() == (1, 800)

    assert beam.get_wavelength() == pytest.approx(0.980112, abs=1e-5)
    assert beam.get_s0() == pytest.approx((0, 0, -1 / beam.get_wavelength()))
def test_experimentlist_factory_from_imageset():
  from dxtbx.model import Beam, Detector, Goniometer, Scan
  from dxtbx.model import Crystal
  from dxtbx.format.Format import Format

  imageset = Format.get_imageset(["filename.cbf"], as_imageset=True)
  imageset.set_beam(Beam(), 0)
  imageset.set_detector(Detector(), 0)

  crystal = Crystal(
    (1, 0, 0), (0, 1, 0), (0, 0, 1), space_group_symbol="P1")

  experiments = ExperimentListFactory.from_imageset_and_crystal(
    imageset, crystal)


  assert len(experiments) == 1
  assert experiments[0].imageset is not None
  assert experiments[0].beam is not None
  assert experiments[0].detector is not None
  assert experiments[0].crystal is not None
def test_from_null_sequence():
    filenames = ["template_%2d.cbf" % (i + 1) for i in range(0, 10)]
    sequence = Format.get_imageset(
        filenames,
        beam=Beam((0, 0, 1)),
        detector=Detector(),
        goniometer=Goniometer((1, 0, 0)),
        scan=Scan((1, 10), (0, 0.1)),
    )

    # Create the experiments
    experiments = ExperimentListFactory.from_sequence_and_crystal(sequence,
                                                                  crystal=None)
    assert len(experiments) == 1
    imagesets = experiments.imagesets()
    assert imagesets[0].get_format_class()
    assert len(imagesets) == 1
    assert imagesets[0].get_beam() == sequence.get_beam()
    assert imagesets[0].get_detector() == sequence.get_detector()
    assert imagesets[0].get_goniometer() == sequence.get_goniometer()
    assert imagesets[0].get_scan() == sequence.get_scan()
Example #6
def test_shoebox_memory_is_a_reasonable_guesstimate(dials_data):
    path = dials_data("centroid_test_data").join("experiments.json").strpath

    exlist = ExperimentListFactory.from_json_file(path)[0]
    exlist.profile = Model(
        None,
        n_sigma=3,
        sigma_b=0.024 * math.pi / 180.0,
        sigma_m=0.044 * math.pi / 180.0,
    )

    rlist = flex.reflection_table.from_predictions(exlist)
    rlist["id"] = flex.int(len(rlist), 0)
    rlist["bbox"] = flex.int6(rlist.size(), (0, 1, 0, 1, 0, 1))

    jobs = JobList()
    jobs.add((0, 1), (0, 9), 9)
    for flatten in (True, False):
        assumed_memory_usage = list(jobs.shoebox_memory(rlist, flatten))
        assert len(assumed_memory_usage) == 1
        assert assumed_memory_usage[0] == pytest.approx(23952, abs=3000)
    def from_dict(cls, obj):
        assert obj["__id__"] == "Indexer"
        assert obj["__name__"] == cls.__name__
        return_obj = cls()
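        # Re-create helper objects, imagesets and experiment lists from their serialized forms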
        for k, v in obj.items():
            if k == "_indxr_helper" and v is not None:
                from xia2.Schema.Interfaces.Indexer import _IndexerHelper

                v = _IndexerHelper(v)
            if k == "_indxr_imagesets" and len(v):
                assert v[0].get("__id__") == "imageset"
                from dxtbx.serialize.imageset import imageset_from_dict

                v = [imageset_from_dict(v_, check_format=False) for v_ in v]
            if isinstance(v, dict):
                if v.get("__id__") == "ExperimentList":
                    from dxtbx.model.experiment_list import ExperimentListFactory

                    v = ExperimentListFactory.from_dict(v, check_format=False)
            setattr(return_obj, k, v)
        return return_obj
Example #8
def data(dials_regression):  # read experiments and reflections
    from collections import namedtuple
    from dials.array_family import flex
    from dxtbx.model.experiment_list import ExperimentListFactory
    from os.path import join

    directory = join(dials_regression, "integration_test_data", "shoeboxes")
    experiments_filename = join(directory, "integrated_experiments.json")
    reflections_filename = join(directory, "shoeboxes_0_0.pickle")
    reference_filename = join(directory, "reference_profiles.pickle")

    experiments = ExperimentListFactory.from_json_file(experiments_filename,
                                                       check_format=False)
    reflections = flex.reflection_table.from_file(reflections_filename)
    with open(reference_filename, "rb") as fh:
        reference = pickle.load(fh)

    Data = namedtuple("Data", ["experiments", "reflections", "reference"])
    return Data(experiments=experiments,
                reflections=reflections,
                reference=reference)
Example #9
    def from_dict(cls, obj):
        assert obj['__id__'] == 'Integrater'
        return_obj = cls()
        for k, v in obj.items():
            if k in ('_intgr_indexer', '_intgr_refiner') and v is not None:
                from libtbx.utils import import_python_object
                klass = import_python_object(import_path=".".join(
                    (v['__module__'], v['__name__'])),
                                             error_prefix='',
                                             target_must_be='',
                                             where_str='').object
                v = klass.from_dict(v)
            if isinstance(v, dict):
                if v.get('__id__') == 'ExperimentList':
                    from dxtbx.model.experiment_list import ExperimentListFactory
                    v = ExperimentListFactory.from_dict(v)
                elif v.get('__id__') == 'imageset':
                    from dxtbx.serialize.imageset import imageset_from_dict
                    v = imageset_from_dict(v, check_format=False)
            setattr(return_obj, k, v)
        return return_obj
Example #10
class Test(object):
    def __init__(self):
        import libtbx.load_env
        try:
            dials_regression = libtbx.env.dist_path('dials_regression')
        except KeyError:
            print('FAIL: dials_regression not configured')
            exit(0)

        import dials
        import os

        filename = os.path.join(dials_regression, 'centroid_test_data',
                                'experiments.json')

        from dxtbx.model.experiment_list import ExperimentListFactory
        self.exlist = ExperimentListFactory.from_json_file(filename)
        assert (len(self.exlist) == 1)

        from dials.array_family import flex
        self.rlist = flex.reflection_table.from_predictions_multi(self.exlist)
def test_rotation_scan_i03_2019_run_4(master_h5):
    assert FormatNexusEigerDLS16M.understand(master_h5)

    expts = ExperimentListFactory.from_filenames(
        [master_h5], format_kwargs={"dynamic_shadowing": True}
    )
    imageset = expts[0].imageset
    assert imageset.get_format_class() == FormatNexusEigerDLS16M

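    # Three-axis (phi, chi, omega) goniometer with dynamic shadowing enabled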
    gonio = imageset.get_goniometer()
    assert list(gonio.get_angles()) == pytest.approx([45.0, 45.0, 45.0])
    assert list(gonio.get_axes().as_double()) == pytest.approx(
        [1.0, -0.0025, 0.0056, -0.006, -0.0264, -0.9996, 1.0, 0.0, 0.0]
    )
    assert list(gonio.get_names()) == ["phi", "chi", "omega"]
    assert imageset.has_dynamic_mask()
    masker = imageset.masker()
    assert isinstance(masker, SmarGonShadowMasker)
    assert masker.get_mask(imageset.get_detector(), 0)[0].count(False) == 0
    assert masker.get_mask(imageset.get_detector(), 50)[0].count(False) == 486717
    assert masker.get_mask(imageset.get_detector(), 100)[0].count(False) == 1092226
Example #12
def test3(dials_regression, tmpdir):
    """Strict check for scan-varying refinement using automated outlier rejection
  block width and interval width setting"""
    tmpdir.chdir()

    # use the i04_weak_data for this test
    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "centroid")
    experiments_path = os.path.join(data_dir,
                                    "experiments_XPARM_REGULARIZED.json")
    pickle_path = os.path.join(data_dir, "spot_all_xds.pickle")

    for pth in (experiments_path, pickle_path):
        assert os.path.exists(pth)

    cmd1 = (
        "dials.refine " + experiments_path + " " + pickle_path +
        " scan_varying=true max_iterations=5 output.history=history.pickle "
        "crystal.orientation.smoother.interval_width_degrees=auto "
        "crystal.unit_cell.smoother.interval_width_degrees=auto")
    result1 = easy_run.fully_buffered(command=cmd1).raise_if_errors()

    # load and check results
    with open("history.pickle", "rb") as fh:
        history = pickle.load(fh)

    expected_rmsds = [[0.619507829, 0.351326044, 0.006955399],
                      [0.174024575, 0.113486044, 0.004704006],
                      [0.098351363, 0.084052519, 0.002660408],
                      [0.069202909, 0.072796782, 0.001451734],
                      [0.064305277, 0.071560831, 0.001165639],
                      [0.062955462, 0.071315612, 0.001074453]]
    assert approx_equal(history['rmsd'], expected_rmsds)

    # check the refined unit cell
    ref_exp = ExperimentListFactory.from_json_file("refined_experiments.json",
                                                   check_format=False)[0]
    unit_cell = ref_exp.crystal.get_unit_cell().parameters()
    assert unit_cell == pytest.approx(
        [42.27482, 42.27482, 39.66893, 90.00000, 90.00000, 90.00000], abs=1e-3)
Example #13
def test_scan_varying_multi_scan_one_crystal(gcb, dials_data, tmpdir):
    # https://github.com/dials/dials/issues/994
    location = dials_data("l_cysteine_dials_output")
    refls = location.join("indexed.refl")
    expts = location.join("indexed.expt")

    # Set options for quick rather than careful refinement
    result = procrunner.run(
        (
            "dials.refine",
            expts,
            refls,
            "output.history=history.json",
            "outlier.algorithm=tukey",
            "max_iterations=3",
            "unit_cell.smoother.interval_width_degrees=56",
            "orientation.smoother.interval_width_degrees=56",
            "gradient_calculation_blocksize=" + gcb,
        ),
        working_directory=tmpdir,
    )
    assert not result.returncode and not result.stderr

    el = ExperimentListFactory.from_json_file(
        tmpdir.join("refined.expt").strpath, check_format=False)

    # Crystal has been copied into each experiment for scan-varying refinement
    assert len(el.crystals()) == 4

    # load and check results
    history = Journal.from_json_file(tmpdir.join("history.json").strpath)

    expected_rmsds = [
        (0.1401658782847504, 0.2225931584837884, 0.002349912655443814),
        (0.12060230585178289, 0.1585977879739876, 0.002114318828411418),
        (0.10970832317567975, 0.1348574975434352, 0.001955034565537597),
        (0.10373159352273859, 0.12827852889951505, 0.0017901404193256304),
    ]
    for a, b in zip(history["rmsd"], expected_rmsds):
        assert a == pytest.approx(b, abs=1e-6)
Example #14
def test(dials_data, tmpdir):
    """Test two theta refine on integrated data."""
    # use multiple scan small molecule data for this test
    data_dir = dials_data("l_cysteine_dials_output", pathlib=True)
    prefix = (20, 25, 30, 35)
    exp_path = [
        data_dir / ("%d_integrated_experiments.json" % p) for p in prefix
    ]
    pkl_path = [data_dir / ("%d_integrated.pickle" % p) for p in prefix]

    for pth in exp_path + pkl_path:
        assert pth.is_file(), f"{str(pth)} missing"

    cmd = ([
        "dials.two_theta_refine",
        "cif=refined_cell.cif",
        "output.correlation_plot.filename=corrplot.png",
    ] + [str(p) for p in exp_path] + [str(p) for p in pkl_path])

    print(cmd)

    # work in a temporary directory
    result = procrunner.run(cmd, working_directory=tmpdir)
    assert not result.returncode and not result.stderr
    assert Path(tmpdir / "refined_cell.expt").is_file()
    ref_exp = ExperimentListFactory.from_json_file(str(tmpdir /
                                                       "refined_cell.expt"),
                                                   check_format=False)

    xls = ref_exp.crystals()
    assert len(xls) == 4
    for xl in xls:
        assert xl.get_unit_cell() != xl.get_recalculated_unit_cell()
        # test refined crystal model against expected values
        assert xl.get_recalculated_unit_cell().parameters() == pytest.approx(
            (5.428022880, 8.144145476, 12.039666971, 90.0, 90.0, 90.0), 1e-4)
        assert xl.get_recalculated_cell_parameter_sd() == pytest.approx(
            (9.58081e-5, 0.000149909, 0.000215765, 0, 0, 0), 1e-4)
        assert xl.get_recalculated_cell_volume_sd() == pytest.approx(
            0.0116254298, 1e-4)
Example #15
def test_multi_panel_parameterisations(dials_regression,
                                       detector_parameterisation_choice):

    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "cspad_refinement")
    exp_file = os.path.join(data_dir,
                            "cspad_refined_experiments_step6_level2_300.json")
    ref_file = os.path.join(data_dir, "cspad_reflections_step7_300.pickle")

    reflections = flex.reflection_table.from_file(ref_file)
    experiments = ExperimentListFactory.from_json_file(exp_file,
                                                       check_format=False)

    # Set refinement parameters
    params = phil_scope.fetch(source=phil.parse("")).extract()
    params.refinement.parameterisation.detector.panels = (
        detector_parameterisation_choice)

    # Construct refiner
    refiner = RefinerFactory.from_parameters_data_experiments(
        params, reflections, experiments)
    assert refiner.experiment_type == "stills"
Example #16
def test_extended(dials_regression, tmpdir):
    tmpdir.chdir()

    # Call dials.create_profile_model
    result = procrunner.run_process([
        'dials.create_profile_model',
        os.path.join(dials_regression, "integration_test_data",
                     "i04-weak-data2", 'experiments.json'),
        os.path.join(dials_regression, "integration_test_data",
                     "i04-weak-data2", 'indexed.pickle'),
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''
    assert os.path.exists('experiments_with_profile_model.json')

    from dxtbx.model.experiment_list import ExperimentListFactory
    experiments = ExperimentListFactory.from_json_file(
        "experiments_with_profile_model.json", check_format=False)
    sigma_b = experiments[0].profile.sigma_b(deg=True)
    sigma_m = experiments[0].profile.sigma_m(deg=True)
    assert sigma_b == pytest.approx(0.02195, abs=1e-3)
    assert sigma_m == pytest.approx(0.04187, abs=1e-3)
def test_experimentlist_factory_from_args(dials_regression):
  pytest.importorskip('dials')
  os.environ['DIALS_REGRESSION'] = dials_regression

  # Get all the filenames
  filenames = [
    os.path.join(dials_regression, 'experiment_test_data', 'experiment_1.json'),
    #os.path.join(dials_regression, 'experiment_test_data', 'experiment_2.json'),
    os.path.join(dials_regression, 'experiment_test_data', 'experiment_3.json'),
    os.path.join(dials_regression, 'experiment_test_data', 'experiment_4.json')]

  # Get the experiments from a list of filenames
  experiments = ExperimentListFactory.from_args(filenames, verbose=True)

  # Have 3 experiments (experiment_2.json is commented out above)
  assert len(experiments) == 3
  for i in range(3):
    assert experiments[i].imageset is not None
    assert experiments[i].beam is not None
    assert experiments[i].detector is not None
    assert experiments[i].goniometer is not None
    assert experiments[i].scan is not None
Example #18
def test_run(dials_data):
    filename = dials_data("centroid_test_data").join("experiments.json").strpath

    exlist = ExperimentListFactory.from_json_file(filename)
    assert len(exlist) == 1

    rlist = flex.reflection_table.from_predictions_multi(exlist)

    corrector = CorrectionsMulti()
    for experiment in exlist:
        corrector.append(
            Corrections(experiment.beam, experiment.goniometer, experiment.detector)
        )

    lp1 = corrector.lp(rlist["id"], rlist["s1"])

    lp2 = flex.double(
        [LP_calculations(exlist[i], s1) for i, s1 in zip(rlist["id"], rlist["s1"])]
    )

    diff = flex.abs(lp1 - lp2)
    assert diff.all_lt(1e-7)
    def tst_from_imageset(self):
        from dxtbx.imageset import ImageSet, NullReader
        from dxtbx.model import Beam, Detector, Goniometer, Scan
        from dxtbx.model import Crystal

        imageset = ImageSet(NullReader(["filename.cbf"]))
        imageset.set_beam(Beam(), 0)
        imageset.set_detector(Detector(), 0)

        crystal = Crystal((1, 0, 0), (0, 1, 0), (0, 0, 1),
                          space_group_symbol="P1")

        experiments = ExperimentListFactory.from_imageset_and_crystal(
            imageset, crystal)

        assert (len(experiments) == 1)
        assert (experiments[0].imageset is not None)
        assert (experiments[0].beam is not None)
        assert (experiments[0].detector is not None)
        assert (experiments[0].crystal is not None)

        print("OK")
def data(dials_regression):  # read experiments and reflections
    directory = os.path.join(dials_regression, "integration_test_data",
                             "shoeboxes")
    experiments_filename = os.path.join(directory,
                                        "integrated_experiments.json")
    reflections_filename = os.path.join(directory, "shoeboxes_0_0.pickle")
    reference_filename = os.path.join(directory, "reference_profiles.pickle")

    experiments = ExperimentListFactory.from_json_file(experiments_filename,
                                                       check_format=False)
    reflections = flex.reflection_table.from_file(reflections_filename)
    with open(reference_filename, "rb") as fh:
        if six.PY3:
            reference = pickle.load(fh, encoding="bytes")
        else:
            reference = pickle.load(fh)

    Data = collections.namedtuple("Data",
                                  ["experiments", "reflections", "reference"])
    return Data(experiments=experiments,
                reflections=reflections,
                reference=reference)
Example #21
def test_scan_varying_multi_scan_one_crystal(dials_data, tmpdir):
    # https://github.com/dials/dials/issues/994
    location = dials_data("l_cysteine_dials_output")
    refls = location.join("indexed.refl")
    expts = location.join("indexed.expt")

    result = procrunner.run(
        ("dials.refine", expts, refls, "output.history=history.json"),
        working_directory=tmpdir,
    )
    assert not result.returncode and not result.stderr

    el = ExperimentListFactory.from_json_file(
        tmpdir.join("refined.expt").strpath, check_format=False)

    # Crystal has been copied into each experiment for scan-varying refinement
    assert len(el.crystals()) == 4

    # load and check results
    history = Journal.from_json_file(tmpdir.join("history.json").strpath)

    expected_rmsds = [
        [0.102069933, 0.186479653, 0.000970519],
        [0.078117368, 0.105479383, 0.000691489],
        [0.058065104, 0.065957717, 0.000497565],
        [0.042720574, 0.052950359, 0.000393993],
        [0.034387246, 0.045392992, 0.000341015],
        [0.031183632, 0.041773097, 0.000308778],
        [0.029800047, 0.040413058, 0.000288812],
        [0.029161629, 0.039836196, 0.000280338],
        [0.028807767, 0.039529834, 0.000277679],
        [0.028551526, 0.039361871, 0.000276772],
        [0.028395222, 0.039322832, 0.000276637],
        [0.028334067, 0.039332485, 0.000276678],
        [0.028319859, 0.039338415, 0.000276687],
        [0.028317563, 0.039339503, 0.000276691],
    ]
    for a, b in zip(history["rmsd"], expected_rmsds):
        assert a == pytest.approx(b, abs=1e-6)
Example #22
def test1():

    dials_regression = libtbx.env.find_in_repositories(
        relative_path="dials_regression", test=os.path.isdir)

    # use the i04_weak_data for this test
    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "i04_weak_data")
    experiments_path = os.path.join(data_dir, "experiments.json")
    pickle_path = os.path.join(data_dir, "indexed_strong.pickle")

    for pth in (experiments_path, pickle_path):
        assert os.path.exists(pth)

    cmd = "dials.slice_sweep " + experiments_path + " " + pickle_path + \
    ' "image_range=1 20"'
    print cmd

    # work in a temporary directory
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory(suffix="test_dials_slice_sweep")
    os.chdir(tmp_dir)
    try:
        result = easy_run.fully_buffered(command=cmd).raise_if_errors()
        # load results
        sliced_exp = ExperimentListFactory.from_json_file(
            "experiments_1_20.json", check_format=False)[0]
        with open("indexed_strong_1_20.pickle", "r") as f:
            sliced_refs = pickle.load(f)
    finally:
        os.chdir(cwd)

    # simple test of results
    assert sliced_exp.scan.get_image_range() == (1, 20)
    assert len(sliced_refs) == 3670

    print "OK"
    return
Example #23
def test_integrator_3d(dials_data, nproc):
    from math import pi

    from dxtbx.model.experiment_list import ExperimentListFactory

    from dials.algorithms.profile_model.gaussian_rs import Model
    from dials.array_family import flex

    path = dials_data("centroid_test_data").join("experiments.json").strpath

    exlist = ExperimentListFactory.from_json_file(path)
    exlist[0].profile = Model(
        None, n_sigma=3, sigma_b=0.024 * pi / 180.0, sigma_m=0.044 * pi / 180.0
    )

    rlist = flex.reflection_table.from_predictions(exlist[0])
    rlist["id"] = flex.int(len(rlist), 0)
    rlist.compute_bbox(exlist)
    rlist.compute_zeta_multi(exlist)
    rlist.compute_d(exlist)

    from libtbx.phil import parse

    from dials.algorithms.integration.integrator import Integrator3D, phil_scope

    params = phil_scope.fetch(
        parse(
            """
    integration.block.size=%d
    integration.mp.nproc=%d
    integration.profile_fitting=False
  """
            % (5, nproc)
        )
    ).extract()

    integrator = Integrator3D(exlist, rlist, params)
    integrator.integrate()
Example #24
    def load_image_file(self, filepath, experiments=None):
        """Loads experiment list and populates image information dictionary
        (can override to load images for old-timey HA14 processing)

        :param filepath: path to raw diffraction image (or pickle!)
        :param experiments: an ExperimentList object can be passed to this function
        :return: experiment list, error message (if any)
        """
        if not experiments:
            try:
                experiments = ExLF.from_filenames(filenames=[filepath])
            except Exception as e:
                error = "IOTA IMPORTER ERROR: Import failed! {}".format(e)
                print(error)
                return None, error

        # Load image information from experiment object
        try:
            imgset = experiments.imagesets()[0]
            beam = imgset.get_beam()
            s0 = beam.get_s0()
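            # s0 is used below to locate the beam centre on the detector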
            detector = imgset.get_detector()[0]

            self.img_object.final["pixel_size"] = detector.get_pixel_size()[0]
            self.img_object.final["img_size"] = detector.get_image_size()
            self.img_object.final["beamX"] = detector.get_beam_centre(s0)[0]
            self.img_object.final["beamY"] = detector.get_beam_centre(s0)[1]
            self.img_object.final["gain"] = detector.get_gain()
            self.img_object.final["distance"] = detector.get_distance()
            self.img_object.final["wavelength"] = beam.get_wavelength()

        except Exception as e:
            error = "IOTA IMPORTER ERROR: Information extraction failed! {}".format(
                e)
            print(error)
            return experiments, error

        return experiments, None
Example #25
def get_items(myrank=None, mykey=None):
    if use_postrefine:
        postreffed = parse_postrefine()
        print("# postrefined images", len(postreffed))
    else:
        postreffed = range(100000)  # alt for skipping postrefine step
    maxy = None
    ycount = 0
    for key in postreffed:
        if mykey is not None:
            if key != mykey: continue
        #each rank should only allow keys in the range from myrank*N_stride to (myrank+1)*N_stride
        if key < myrank * N_stride: continue
        if key >= (myrank + 1) * N_stride: continue
        if key >= N_total: continue

        from dxtbx.model.experiment_list import ExperimentListFactory
        try:
            E = ExperimentListFactory.from_json_file(json_glob % key,
                                                     check_format=False)[0]
        except IOError as e:
            import sys
            print("trapped Error, continuing,", e, file=sys.stderr)
            continue
        C = E.crystal
        C.show()
        from six.moves import cPickle as pickle
        T = pickle.load(open(pickle_glob % key, "rb"))
        resolutions = T["d"]
        millers = T["miller_index"]
        nitem = len(resolutions)
        ycount += 1
        if maxy is not None and ycount > maxy:
            print("maxy break %d" % maxy)
            break
        print("THE ACTUAL JSON / PICKLE USED:", json_glob % key,
              pickle_glob % key)
        yield T, key
def test(dials_regression, tmpdir):
    """Test two theta refine on integrated data."""
    # use multiple scan small molecule data for this test
    data_dir = os.path.join(dials_regression, "xia2-28")
    prefix = ["20", "25", "30", "35"]
    exp_path = [e + "_integrated_experiments.json" for e in prefix]
    exp_path = [os.path.join(data_dir, e) for e in exp_path]
    pkl_path = [e + "_integrated.pickle" for e in prefix]
    pkl_path = [os.path.join(data_dir, e) for e in pkl_path]

    for pth in exp_path + pkl_path:
        assert os.path.exists(pth), "%s missing" % pth

    cmd = ([
        "dials.two_theta_refine",
        "cif=refined_cell.cif",
        "output.correlation_plot.filename=corrplot.png",
    ] + exp_path + pkl_path)

    print(cmd)

    # work in a temporary directory
    result = procrunner.run(cmd, working_directory=tmpdir)
    assert not result.returncode and not result.stderr
    assert tmpdir.join("refined_cell.expt").check()
    ref_exp = ExperimentListFactory.from_json_file(
        tmpdir.join("refined_cell.expt").strpath, check_format=False)

    xls = ref_exp.crystals()
    assert len(xls) == 1  # crystal models should have been combined
    xl = xls[0]

    # test refined crystal model against expected values
    assert xl.get_unit_cell().parameters() == pytest.approx(
        (5.428022880, 8.144145476, 12.039666971, 90.0, 90.0, 90.0), 1e-4)
    assert xl.get_cell_parameter_sd() == pytest.approx(
        (9.58081e-5, 0.000149909, 0.000215765, 0, 0, 0), 1e-4)
    assert xl.get_cell_volume_sd() == pytest.approx(0.0116254298, 1e-4)
Example #27
def test_experiments(dials_data, tmpdir):
    input_filename = dials_data("centroid_test_data").join("experiments.json").strpath
    mask_filename = dials_data("centroid_test_data").join("lookup_mask.pickle").strpath
    output_filename = tmpdir.join("output.expt").strpath

    result = procrunner.run(
        [
            "dials.apply_mask",
            f"input.experiments={input_filename}",
            f"input.mask={mask_filename}",
            f"output.experiments={output_filename}",
        ],
        working_directory=tmpdir.strpath,
    )
    assert not result.returncode and not result.stderr

    from dxtbx.model.experiment_list import ExperimentListFactory

    experiments = ExperimentListFactory.from_json_file(output_filename)

    assert len(experiments) == 1
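    # The applied mask is recorded via the imageset's external lookup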
    imageset = experiments[0].imageset
    assert imageset.external_lookup.mask.filename == mask_filename
Example #28
def get_items(myrank):
    from post5_ang_misset import parse_postrefine
    postreffed = parse_postrefine()
    print("# postrefined images", len(postreffed))
    maxy = 2001
    ycount = 0
    for key in postreffed:
        #each rank should only allow keys in the range from myrank*N_stride to (myrank+1)*N_stride
        if key < myrank * N_stride: continue
        if key >= (myrank + 1) * N_stride: continue

        from dxtbx.model.experiment_list import ExperimentListFactory
        E = ExperimentListFactory.from_json_file(json_glob % key,
                                                 check_format=False)[0]
        C = E.crystal
        from six.moves import cPickle as pickle
        T = pickle.load(open(pickle_glob % key, "rb"))
        resolutions = T["d"]
        millers = T["miller_index"]
        nitem = len(resolutions)
        ycount += 1
        if ycount > maxy: break
        yield T, key
Example #29
def test_slice_sweep_with_first_images_missing(dials_regression, tmpdir):
  """Test slicing where scan image range does not start at 1, exercising
  a case that exposed a bug"""

  tmpdir.chdir()

  # use the i04_weak_data for this test
  data_dir = os.path.join(dials_regression, "refinement_test_data", "i04_weak_data")
  experiments_path = os.path.join(data_dir, "experiments.json")

  # first slice
  cmd = "dials.slice_sweep " + experiments_path + " image_range=5,20"
  result = easy_run.fully_buffered(command=cmd).raise_if_errors()

  # second slice
  cmd = "dials.slice_sweep experiments_5_20.json image_range=10,20"
  result = easy_run.fully_buffered(command=cmd).raise_if_errors()

  sliced_exp = ExperimentListFactory.from_json_file("experiments_5_20_10_20.json",
              check_format=False)[0]
  assert sliced_exp.scan.get_image_range() == (10, 20)
  assert sliced_exp.scan.get_array_range() == (9, 20)
  assert sliced_exp.scan.get_oscillation()[0] == pytest.approx(83.35)
Example #30
def get_json_w_mask_img_2d(experiments_list_path, img_num):
    print("experiments_list_path, img_num:", experiments_list_path, img_num)
    pan_num = 0
    experiments_path = experiments_list_path[0]
    print("importing from:", experiments_path)
    experiments = ExperimentListFactory.from_json_file(experiments_path)

    on_sweep_img_num, n_sweep = get_correct_img_num_n_sweep_num(
        experiments, img_num)

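    # Read the externally referenced mask pickle for the selected sweep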
    try:
        imageset_tmp = experiments.imagesets()[n_sweep]
        mask_file = imageset_tmp.external_lookup.mask.filename
        pick_file = open(mask_file, "rb")
        mask_tup_obj = pickle.load(pick_file)
        pick_file.close()
        mask_flex = mask_tup_obj[0]
        str_data = img_stream_ext.mask_arr_2_str(mask_flex)

    except FileNotFoundError:
        str_data = None

    return str_data
Example #31
  # Output datasize
  try:
    datasizeidx = [a.find("datasize")==0 for a in args].index(True)
  except ValueError:
    print "You really should supply datasize, but ok"
#    raise ValueError,"Output datasze file must be specified."
  else:
    datasize = args.pop(datasizeidx).split("=")[1]

  import copy, os

  import dxtbx
  import numpy as np

  from dxtbx.model.experiment_list import ExperimentListFactory
  from dials.array_family import flex
   
  experiments = ExperimentListFactory.from_json_file(json, check_format=False)
  beam = experiments[0].beam
  detector = experiments[0].detector
 
  lab_coordinates = flex.vec3_double()
  for panel in detector: 
    pixels = flex.vec2_double(panel.get_image_size())
    mms = panel.pixel_to_millimeter(pixels)
    lab_coordinates.extend(panel.get_lab_coord(mms))

  # generate s1 vectors
  s1 = lab_coordinates.each_normalize() * (1 / beam.get_wavelength())
  # generate x vectors
  x = np.asarray(s1 - beam.get_s0())
    
#  DATAsize = np.asarray(detector[0].get_image_size())
Example #32
    amatrixdiridx = [a.find("amatrix_dir")==0 for a in args].index(True)
  except ValueError:
    amatrix_dir_prefix = "tmpdir_"
  else:
    amatrix_dir_prefix = args.pop(amatrixdiridx).split("=")[1]

  if (not (rotation_series or stills_process)):
    raise ValueError,"Must specify image_glob (for rotation series) or index_glob (for stills)."

  import copy, os

  import dxtbx
  from dxtbx.model.experiment_list import ExperimentListFactory
  from dials.array_family import flex
   
  experiments = ExperimentListFactory.from_json_file(metro, check_format=False)
  beam = experiments[0].beam
  detector = experiments[0].detector

  print "s0 from experiment[0]: ",beam.get_s0()
  print "wavelength from experiment[0]: ",beam.get_wavelength()
 
  lab_coordinates = flex.vec3_double()
  for panel in detector: 
    pixels = flex.vec2_double(panel.get_image_size())
    mms = panel.pixel_to_millimeter(pixels)
    lab_coordinates.extend(panel.get_lab_coord(mms))

  # generate s1 vectors
  s1 = lab_coordinates.each_normalize() * (1 / beam.get_wavelength())
  # generate x vectors