예제 #1
0
def test_reindex_multi_sequence(dials_regression, tmpdir):
    """Run dials.reindex on multi-sweep indexed data and verify that the
    reflection count is preserved and the reindexed unit cell / space group
    match the expected values."""
    data_dir = os.path.join(dials_regression, "indexing_test_data",
                            "multi_sweep")
    pickle_path = os.path.join(data_dir, "indexed.pickle")
    experiments_path = os.path.join(data_dir, "experiments.json")
    commands = [
        "dials.reindex",
        pickle_path,
        experiments_path,
        "change_of_basis_op=x+y,x-z,y-z",
    ]

    result = procrunner.run(commands, working_directory=tmpdir)
    assert not result.returncode and not result.stderr
    assert tmpdir.join("reindexed.refl").check()
    assert tmpdir.join("reindexed.expt").check()

    # Use a context manager so the input file handle is closed promptly
    # (the original open(...).read() leaked the handle).
    with open(pickle_path, "rb") as fh:
        old_reflections = pickle_loads(fh.read())
    new_reflections = pickle_loads(tmpdir.join("reindexed.refl").read("rb"))
    assert len(old_reflections) == len(new_reflections)
    new_experiments = load.experiment_list(
        tmpdir.join("reindexed.expt").strpath, check_format=False)
    new_cs = new_experiments[0].crystal.get_crystal_symmetry()
    assert new_cs.unit_cell().parameters() == pytest.approx((
        6.189939294071243,
        6.189939294071243,
        6.189939294071242,
        113.16417286469935,
        107.65690626466579,
        107.65690626466579,
    ))
    assert (new_experiments[0].crystal.get_space_group().type().hall_symbol()
            == " I 4 (x+y,y+z,x+z)")
예제 #2
0
    def cosym(self):
        """Run dials.cosym on the managed data and store its results.

        Exports the current experiments/reflections to temporary files,
        runs cosym (constrained to the user-supplied space group if one was
        given), then reloads the reindexed output into the data manager.
        If no space group was given, adopt the acentric space group derived
        from cosym's best solution.
        """
        logger.debug("Running cosym analysis")
        cosym = DialsCosym()
        auto_logfiler(cosym)

        experiments_filename = self._data_manager.export_experiments(
            "tmp.expt")
        reflections_filename = self._data_manager.export_reflections(
            "tmp.refl")
        cosym.add_experiments_json(experiments_filename)
        cosym.add_reflections_file(reflections_filename)
        if self._params.symmetry.space_group is not None:
            cosym.set_space_group(self._params.symmetry.space_group.group())
        cosym.run()
        self._cosym_analysis = cosym.get_cosym_analysis()
        self._experiments_filename = cosym.get_reindexed_experiments()
        self._reflections_filename = cosym.get_reindexed_reflections()
        self._data_manager.experiments = load.experiment_list(
            self._experiments_filename, check_format=False)
        self._data_manager.reflections = flex.reflection_table.from_file(
            self._reflections_filename)

        if self._params.symmetry.space_group is None:
            best_solution = cosym.get_best_solution()
            best_space_group = sgtbx.space_group(
                str(best_solution["patterson_group"])
            ).build_derived_acentric_group()
            self._params.symmetry.space_group = best_space_group.info()
            # Lazy %-style logging args: formatting is deferred until the
            # record is actually emitted.
            logger.info("Space group determined by dials.cosym: %s",
                        best_space_group.info())
예제 #3
0
def test_import_still_sequence_as_experiments_subset(dials_data, tmpdir):
    """Import a 3-image subset with scan.oscillation=10,0 and check that one
    experiment per image is created, all sharing a single imageset, with
    scans and goniometers retained."""
    image_files = dials_data("centroid_test_data").listdir("centroid*.cbf",
                                                           sort=True)[3:6]

    out = "experiments_as_still.expt"

    result = procrunner.run(
        [
            "dials.import", "scan.oscillation=10,0",
            "output.experiments=%s" % out
        ] + [f.strpath for f in image_files],
        working_directory=tmpdir.strpath,
    )
    # Bug fix: the result was previously discarded (`_ =`), so an import
    # failure only surfaced as a confusing load error below.
    assert not result.returncode

    imported_exp = load.experiment_list(tmpdir.join(out).strpath)
    assert len(imported_exp) == len(image_files)
    for exp in imported_exp:
        assert exp.identifier != ""

    # All experiments should share a single imageset.
    iset = set(exp.imageset for exp in imported_exp)
    assert len(iset) == 1

    # verify scans, goniometers kept too
    assert all(exp.scan.get_oscillation() == (10.0, 0.0)
               for exp in imported_exp)
    assert all(exp.goniometer is not None for exp in imported_exp)
예제 #4
0
def test_symmetry_laue_only(dials_data, tmpdir):
    """Simple test to check that dials.symmetry completes"""

    lcys = dials_data("l_cysteine_dials_output")
    # Two sweeps (20 and 25), each contributing experiments + reflections.
    args = ["dials.symmetry"]
    for sweep in (20, 25):
        args.append(lcys / ("%d_integrated_experiments.json" % sweep))
        args.append(lcys / ("%d_integrated.pickle" % sweep))
    args.append("systematic_absences.check=False")

    result = procrunner.run(args, working_directory=tmpdir)
    assert not result.returncode and not result.stderr
    assert tmpdir.join("symmetrized.refl").check()
    assert tmpdir.join("symmetrized.expt").check()

    experiments = load.experiment_list(
        tmpdir.join("symmetrized.expt").strpath, check_format=False)
    assert str(experiments[0].crystal.get_space_group().info()) == "P 2 2 2"

    refl = flex.reflection_table.from_file(
        tmpdir.join("symmetrized.refl").strpath)
    # check that there are 2 unique id and imageset_ids, and that these
    # correctly correspond to each experiment
    assert len(set(refl["id"])) == 2
    assert len(set(refl["imageset_id"])) == 2
    for id_ in range(2):
        matching = refl["id"] == id_
        assert set(refl["imageset_id"].select(matching)) == {id_}
예제 #5
0
def test_with_convert_sequences_to_stills(dials_data, tmpdir):
    """Import a rotation data set with convert_sequences_to_stills=True and
    verify the result is a collection of still experiments."""
    images = dials_data("centroid_test_data").listdir("centroid*.cbf", sort=True)
    args = [
        "dials.import",
        "convert_sequences_to_stills=True",
        "output.experiments=experiments_as_stills.expt",
    ]
    args.extend(f.strpath for f in images)
    result = procrunner.run(args, working_directory=tmpdir.strpath)
    assert not result.returncode and not result.stderr
    assert tmpdir.join("experiments_as_stills.expt").check(file=1)

    experiments = load.experiment_list(
        tmpdir.join("experiments_as_stills.expt").strpath
    )
    assert all(exp.identifier != "" for exp in experiments)

    # should be no goniometers
    assert experiments.scans() == [None]
    assert experiments.goniometers() == [None]

    # should be same number of imagesets as images
    assert len(experiments.imagesets()) == len(images)

    # all should call out as still too
    assert experiments.all_stills()
예제 #6
0
def test_symmetry_basis_changes_for_C2(tmpdir):
    """Test the correctness of change of basis operations in dials.symmetry

    Supply the unit cell of beta-lactamase, which triggers a change of
    basis from input to minimum during symmetry analysis."""
    # Generated input files are written relative to the current directory.
    os.chdir(tmpdir.strpath)
    unit_cell = (53.173, 61.245, 69.292, 90.0, 93.04675, 90.0)
    space_group = sgtbx.space_group_info("C 2").group()
    experiments, reflections, _ = generate_experiments_reflections(
        space_group=space_group,
        unit_cell=unit_cell,
        sample_size=1,
        map_to_minimum=False,
    )
    experiments.as_json("tmp.expt")
    combined = flex.reflection_table()
    for table in reflections:
        combined.extend(table)
    combined.as_pickle("tmp.refl")

    result = procrunner.run(
        [
            "dials.symmetry",
            tmpdir.join("tmp.expt").strpath,
            tmpdir.join("tmp.refl").strpath,
        ],
        working_directory=tmpdir.strpath,
    )
    assert not result.returncode and not result.stderr
    assert tmpdir.join("symmetrized.refl").check(file=1)
    assert tmpdir.join("symmetrized.expt").check(file=1)

    expts = load.experiment_list(tmpdir.join("symmetrized.expt").strpath,
                                 check_format=False)
    # The symmetrized cell should round-trip back to the input cell.
    for value, expected in zip(expts[0].crystal.get_unit_cell().parameters(),
                               unit_cell):
        assert value == pytest.approx(expected)
예제 #7
0
def load_reference_geometries(geometry_file_list):
    """Load the detector and beam models from each reference geometry file,
    raising if any two reference geometries are too similar to each other."""
    from dxtbx.serialize import load

    reference_components = []
    for path in geometry_file_list:
        try:
            expts = load.experiment_list(path, check_format=False)
            assert len(expts.detectors()) == 1
            assert len(expts.beams()) == 1
            detector = expts.detectors()[0]
            beam = expts.beams()[0]
        except Exception:
            # Fall back to the legacy datablock format.
            datablock = load.datablock(path)
            assert len(datablock) == 1
            iset = datablock[0].extract_imagesets()[0]
            detector = iset.get_detector()
            beam = iset.get_beam()
        reference_components.append({
            'detector': detector,
            'beam': beam,
            'file': path
        })

    import itertools
    for first, second in itertools.combinations(reference_components, 2):
        if compare_geometries(first['detector'], second['detector']):
            from xia2.Handlers.Streams import Chatter
            Chatter.write(
                'Reference geometries given in %s and %s are too similar' %
                (first['file'], second['file']))
            raise Exception('Reference geometries too similar')
    return reference_components
예제 #8
0
def test_symmetry_with_laue_group_override(dials_data, tmpdir):
    """Simple test to check that dials.symmetry, with overridden laue group, completes"""

    lcys = dials_data("l_cysteine_dials_output")
    args = [
        "dials.symmetry",
        "laue_group=P121",
        "change_of_basis_op=-b,-a,-c",
    ]
    for sweep in (20, 25):
        args.append(lcys / ("%d_integrated_experiments.json" % sweep))
        args.append(lcys / ("%d_integrated.pickle" % sweep))

    result = procrunner.run(args, working_directory=tmpdir)
    assert not result.returncode and not result.stderr
    assert tmpdir.join("symmetrized.refl").check()
    assert tmpdir.join("symmetrized.expt").check()
    experiments = load.experiment_list(
        tmpdir.join("symmetrized.expt").strpath, check_format=False)
    assert str(experiments[0].crystal.get_space_group().info()) == "P 1 21 1"
    # Verify that the unit cell has been reindexed correctly
    assert experiments[0].crystal.get_unit_cell().parameters() == pytest.approx(
        (8.21578444269, 5.4815363434, 12.1457047712, 90.0, 90.0, 90.0))
예제 #9
0
def test_cosym_partial_dataset(dials_data, tmpdir):
    """Test how cosym handles partial/bad datasets."""
    mcp = dials_data("multi_crystal_proteinase_k")
    args = ["dials.cosym"]
    for i in (1, 2):
        args += [
            mcp.join("experiments_%d.json" % i).strpath,
            mcp.join("reflections_%d.pickle" % i).strpath,
        ]
    # Make one dataset that will be removed in prefiltering
    bad = flex.reflection_table.from_file(
        mcp.join("reflections_8.pickle").strpath)
    bad["partiality"] = flex.double(bad.size(), 0.1)
    bad.as_file(tmpdir.join("renamed.refl").strpath)
    args += [
        tmpdir.join("renamed.refl").strpath,
        mcp.join("experiments_8.json").strpath,
    ]
    # Add another good dataset at the end of the input list
    args += [
        mcp.join("experiments_10.json").strpath,
        mcp.join("reflections_10.pickle").strpath,
    ]

    result = procrunner.run(args, working_directory=tmpdir.strpath)
    assert not result.returncode and not result.stderr
    assert tmpdir.join("symmetrized.refl").check(file=1)
    assert tmpdir.join("symmetrized.expt").check(file=1)
    expts = load.experiment_list(tmpdir.join("symmetrized.expt").strpath,
                                 check_format=False)
    # The low-partiality dataset should have been filtered out.
    assert len(expts) == 3
예제 #10
0
def test_targeted_scaling(dials_data, tmpdir):
    """Test the targeted scaling workflow."""
    target_dir = dials_data("l_cysteine_4_sweeps_scaled")
    target_refl = target_dir.join("scaled_35.refl").strpath
    target_expt = target_dir.join("scaled_35.expt").strpath

    data_dir = dials_data("l_cysteine_dials_output")
    refl_1 = data_dir / "20_integrated.pickle"
    expt_1 = data_dir / "20_integrated_experiments.json"
    refl_2 = data_dir / "25_integrated.pickle"
    expt_2 = data_dir / "25_integrated_experiments.json"

    # Do targeted scaling, use this as a chance to test the KB model as well.
    run_one_scaling(
        tmpdir, [target_refl, refl_1, target_expt, expt_1, "model=KB"])

    scaled_exp = tmpdir.join("scaled.expt").strpath
    scaled_refl = tmpdir.join("scaled.refl").strpath
    models = load.experiment_list(scaled_exp,
                                  check_format=False).scaling_models()
    assert len(models) == 2
    assert models[0].id_ == "physical"
    assert models[1].id_ == "KB"

    # Scale a second data set against the first, target-only.
    run_one_scaling(
        tmpdir,
        [refl_2, scaled_refl, expt_2, scaled_exp, "model=KB",
         "only_target=True"],
    )
예제 #11
0
def test_model_connectivity(dials_data):
    """Test that dials.show experiments_has_model option."""
    location = dials_data("l_cysteine_dials_output")
    expts = load.experiment_list(
        location.join("indexed.expt").strpath, check_format=False
    )
    # The expected table must match byte-for-byte: four experiments sharing
    # one beam and one crystal, with experiment 3 on a second detector.
    assert (
        model_connectivity(expts)
        == """\
Experiment / Models

Detector:
              0  1
Experiment 0  x  .
Experiment 1  x  .
Experiment 2  x  .
Experiment 3  .  x

Crystal:
              0
Experiment 0  x
Experiment 1  x
Experiment 2  x
Experiment 3  x

Beam:
              0
Experiment 0  x
Experiment 1  x
Experiment 2  x
Experiment 3  x"""
    )
예제 #12
0
def test_slice_sequence_with_first_images_missing(dials_regression, tmpdir):
    """Test slicing where scan image range does not start at 1, exercising
    a case that exposed a bug"""

    # use the i04_weak_data for this test
    experiments_path = os.path.join(
        dials_regression, "refinement_test_data", "i04_weak_data",
        "experiments.json")

    # first slice
    first = procrunner.run(
        ["dials.slice_sequence", experiments_path, "image_range=5,20"],
        working_directory=tmpdir,
    )
    assert not first.returncode and not first.stderr

    # second slice
    second = procrunner.run(
        ["dials.slice_sequence", "experiments_5_20.expt", "image_range=10,20"],
        working_directory=tmpdir,
    )
    assert not second.returncode and not second.stderr

    sliced = load.experiment_list(
        tmpdir.join("experiments_5_20_10_20.expt").strpath, check_format=False
    )[0]
    assert sliced.scan.get_image_range() == (10, 20)
    assert sliced.scan.get_array_range() == (9, 20)
    assert sliced.scan.get_oscillation()[0] == pytest.approx(83.35)
예제 #13
0
def test_mtz_primitive_cell(dials_data, tmp_path):
    """Reindex insulin data to the primitive setting, export to mtz and
    check the mtz symmetry and resolution limits."""
    scaled_expt = dials_data("insulin_processed", pathlib=True) / "scaled.expt"
    scaled_refl = dials_data("insulin_processed", pathlib=True) / "scaled.refl"

    # First reindex to the primitive setting
    expts = load.experiment_list(scaled_expt, check_format=False)
    cs = expts[0].crystal.get_crystal_symmetry()
    cb_op = cs.change_of_basis_op_to_primitive_setting()
    result = procrunner.run(
        [
            "dials.reindex",
            scaled_expt,
            scaled_refl,
            f'change_of_basis_op="{cb_op}"',
        ],
        working_directory=tmp_path,
    )
    # Bug fix: the subprocess results were previously ignored, so a failed
    # reindex/export surfaced only as a confusing downstream error.
    assert not result.returncode

    # Now export the reindexed experiments/reflections
    result = procrunner.run(
        ["dials.export", tmp_path / "reindexed.expt", tmp_path / "reindexed.refl"],
        working_directory=tmp_path,
    )
    assert not result.returncode

    mtz_obj = mtz.object(str(tmp_path / "scaled.mtz"))
    cs_primitive = cs.change_basis(cb_op)
    assert mtz_obj.space_group() == cs_primitive.space_group()
    refl = flex.reflection_table.from_file(scaled_refl)
    refl = refl.select(~refl.get_flags(refl.flags.bad_for_scaling, all=False))
    for ma in mtz_obj.as_miller_arrays():
        assert ma.crystal_symmetry().is_similar_symmetry(cs_primitive)
        assert ma.d_max_min() == pytest.approx(
            (flex.max(refl["d"]), flex.min(refl["d"]))
        )
예제 #14
0
def test_mmcif_p1_narrow_wedge(dials_data, tmp_path):
    """Call dials.export format=mmcif after scaling"""
    data_dir = dials_data("x4wide_processed", pathlib=True)

    # Slice down to a narrow 3-image wedge and strip the symmetry to P1.
    refl = flex.reflection_table.from_file(
        data_dir / "AUTOMATIC_DEFAULT_scaled.refl")
    refl = slice_reflections(refl, [(1, 3)])
    refl.as_file(tmp_path / "p1_narrow.refl")

    expts = load.experiment_list(
        data_dir / "AUTOMATIC_DEFAULT_scaled.expt", check_format=False)
    expts = slice_experiments(expts, [(1, 3)])
    expts[0].crystal.set_space_group(sgtbx.space_group())
    expts.as_file(tmp_path / "p1_narrow.expt")

    result = procrunner.run(
        [
            "dials.export",
            "format=mmcif",
            tmp_path / "p1_narrow.expt",
            tmp_path / "p1_narrow.refl",
            "mmcif.hklout=scaled.mmcif",
            "compress=None",
        ],
        working_directory=tmp_path,
    )
    assert not result.returncode and not result.stderr
    assert (tmp_path / "scaled.mmcif").is_file()

    model = iotbx.cif.reader(file_path=str(tmp_path / "scaled.mmcif")).model()
    assert model["dials"]["_reflns.pdbx_redundancy"] == "1.0"
    assert model["dials"]["_reflns.pdbx_CC_half"] == "0.0"
예제 #15
0
  def load_reference_geometry(self, params):
    '''
    Load a reference geometry file

    '''
    from collections import namedtuple
    # Load reference geometry
    reference_detector = None
    reference_beam = None
    if params.input.reference_geometry is not None:
      from dxtbx.serialize import load
      experiments, datablock = None, None
      try:
        experiments = load.experiment_list(
          params.input.reference_geometry, check_format=False)
      except Exception:
        # Python 3 fix: "except Exception, e" is a SyntaxError on Python 3
        # and the bound exception value was never used anyway.
        # Fall back to the legacy datablock format.
        datablock = load.datablock(params.input.reference_geometry)
      assert experiments or datablock, 'Could not import reference geometry'
      if experiments:
        assert len(experiments.detectors()) >= 1
        assert len(experiments.beams()) >= 1
        if len(experiments.detectors()) > 1:
          raise Sorry('The reference geometry file contains %d detector definitions, but only a single definition is allowed.' % len(experiments.detectors()))
        if len(experiments.beams()) > 1:
          raise Sorry('The reference geometry file contains %d beam definitions, but only a single definition is allowed.' % len(experiments.beams()))
        reference_detector = experiments.detectors()[0]
        reference_beam = experiments.beams()[0]
        reference_goniometer = experiments.goniometers()[0]
      else:
        assert len(datablock) == 1
        imageset = datablock[0].extract_imagesets()[0]
        reference_detector = imageset.get_detector()
        reference_beam = imageset.get_beam()
        reference_goniometer = imageset.get_goniometer()
예제 #16
0
def test_proteinase_k_filter_deltacchalf(regression_test, dials_data, tmpdir):
    """Run multiplex with deltacchalf filtering and verify the outputs."""
    data_dir = dials_data("multi_crystal_proteinase_k")
    expts = sorted(f.strpath for f in data_dir.listdir("experiments*.json"))
    refls = sorted(f.strpath for f in data_dir.listdir("reflections*.pickle"))
    with tmpdir.as_cwd():
        run_multiplex(
            expts + refls + [
                "filtering.method=deltacchalf",
                "filtering.deltacchalf.stdcutoff=1",
                "max_clusters=2",
            ]
        )
    expected = expected_data_files + [
        "filtered.expt",
        "filtered.refl",
        "filtered.mtz",
        "filtered_unmerged.mtz",
    ]
    for f in expected:
        assert tmpdir.join(f).check(file=1), "expected file %s missing" % f
    # One of the eight experiments should have been rejected by the filter.
    for expt_file, n_expected in (("scaled.expt", 8), ("filtered.expt", 7)):
        elist = load.experiment_list(tmpdir.join(expt_file).strpath,
                                     check_format=False)
        assert len(elist) == n_expected

    # Check that clusters 5 and 6 have been scaled
    for cluster in ("cluster_5", "cluster_6"):
        assert tmpdir.join(cluster).check(dir=1)
        assert tmpdir.join(cluster, "scaled.mtz").check(file=1)
        assert tmpdir.join(cluster, "scaled_unmerged.mtz").check(file=1)

    # Delete large temporary files to conserve disk space
    for f in tmpdir.listdir("*.refl"):
        f.remove()
    for cluster in ("cluster_5", "cluster_6"):
        for f in tmpdir.join(cluster).listdir("*.refl"):
            f.remove()
예제 #17
0
def test_mtz_recalculated_cell(dials_data, tmpdir):
    """Check dials.export uses the cell recalculated by two-theta refinement.

    Runs dials.two_theta_refine so the crystal gains a
    recalculated_unit_cell, then exports to mtz and verifies the mtz
    carries that cell and honours the d_min cutoff."""
    # First run dials.two_theta_refine to ensure that the crystals have
    # recalculated_unit_cell set
    scaled_expt = dials_data("x4wide_processed").join(
        "AUTOMATIC_DEFAULT_scaled.expt")
    scaled_refl = dials_data("x4wide_processed").join(
        "AUTOMATIC_DEFAULT_scaled.refl")
    result = procrunner.run(
        ["dials.two_theta_refine", scaled_expt, scaled_refl],
        working_directory=tmpdir,
    )
    # Bug fix: the first run's result was previously never checked.
    assert not result.returncode and not result.stderr
    assert tmpdir.join("refined_cell.expt").check(file=1)
    refined_expt = load.experiment_list(
        tmpdir.join("refined_cell.expt").strpath, check_format=False)
    ttr_cell = refined_expt.crystals()[0].get_recalculated_unit_cell()

    d_min = 1.3
    result = procrunner.run(
        [
            "dials.export",
            "format=mtz",
            tmpdir.join("refined_cell.expt"),
            scaled_refl,
            "d_min=%f" % d_min,
        ],
        working_directory=tmpdir,
    )
    assert not result.returncode and not result.stderr
    assert tmpdir.join("scaled.mtz").check(file=1)
    # The resulting mtz should have the same unit cell set as the recalculated_unit_cell
    # from dials.two_theta_refine
    for ma in mtz.object(tmpdir.join("scaled.mtz").strpath).as_miller_arrays():
        assert ttr_cell.parameters() == pytest.approx(
            ma.unit_cell().parameters())
        assert ma.d_min() >= d_min
예제 #18
0
def test_refine_bravais_settings_2(dials_regression, tmpdir):
  """Run dials.refine_bravais_settings on multi-sweep data and check the
  bravais settings and summary it writes."""
  tmpdir.chdir()

  data_dir = os.path.join(dials_regression, "indexing_test_data", "multi_sweep")
  pickle_path = os.path.join(data_dir, "indexed.pickle")
  experiments_path = os.path.join(data_dir, "experiments.json")
  commands = ["dials.refine_bravais_settings", pickle_path, experiments_path]
  command = " ".join(commands)
  print(command)
  # raise_if_errors() already fails the test on a non-zero exit status; the
  # previously unused `result =` binding has been dropped.
  easy_run.fully_buffered(command=command).raise_if_errors()
  for i in range(1, 10):
    assert os.path.exists("bravais_setting_%i.json" % i)
  from dxtbx.serialize import load
  experiments_list = load.experiment_list(
    "bravais_setting_9.json", check_format=False)
  assert len(experiments_list) == 4
  assert len(experiments_list.crystals()) == 1
  assert experiments_list[0].crystal.get_unit_cell().is_similar_to(
    uctbx.unit_cell((7.31, 7.31, 6.82, 90.00, 90.00, 90.00)))
  assert experiments_list[0].crystal.get_space_group().type().hall_symbol() \
         == ' I 4'
  assert os.path.exists("bravais_summary.json")
  # json is text; open in text mode rather than "rb".
  with open("bravais_summary.json") as fh:
    bravais_summary = json.load(fh)
  for i in range(1, 23):
    assert str(i) in bravais_summary

  assert bravais_summary['9']['unit_cell'] == pytest.approx(
    [7.31, 7.31, 6.82, 90.00, 90.00, 90.00], abs=1e-1)
  assert bravais_summary['9']['bravais'] == 'tI'
  assert bravais_summary['9']['rmsd'] == pytest.approx(0.103, abs=1e-2)
  # E712 fix: assert truthiness rather than comparing "== True".
  assert bravais_summary['9']['recommended']
예제 #19
0
def test_search_small_molecule(dials_data, run_in_tmpdir):
    """Perform a beam-centre search on a multi-sequence data set..

    Do the following:
    1. Run dials.search_beam_centre on a single datablock and pickled
    reflection table containing multiple experiment IDs, as output by
    dials.find_spots;
      a) Check that the program exits correctly;
      b) Check that it produces the expected output datablock.
    2. Check that the beam centre search has resulted in the expected shift
    in detector origin.
    """

    data = dials_data("l_cysteine_dials_output")
    datablock_path = data.join("datablock.json").strpath
    pickle_path = data.join("strong.pickle").strpath

    args = ["dials.search_beam_position", datablock_path, pickle_path]
    print(args)
    result = procrunner.run(args)
    assert not result.returncode and not result.stderr
    assert os.path.exists("optimised.expt")

    from dxtbx.serialize import load

    original_imageset = load.datablock(
        datablock_path, check_format=False)[0].extract_imagesets()[0]
    detector_before = original_imageset.get_detector()
    detector_after = load.experiment_list(
        "optimised.expt", check_format=False)[0].detector
    shift = (scitbx.matrix.col(detector_before[0].get_origin()) -
             scitbx.matrix.col(detector_after[0].get_origin()))
    print(shift)
    assert shift.elems == pytest.approx((0.11, -1.03, 0.0), abs=1e-1)
예제 #20
0
def test_update_imageset_ids(dials_data):
    """Check that update_imageset_ids gives each experiment a distinct
    imageset_id matching its reflection id after identifier assignment."""
    data_dir = dials_data("multi_crystal_proteinase_k", pathlib=True)
    expts = ExperimentList()
    tables = []
    # Load eight proteinase-K datasets (numbering has gaps: no 6 or 9).
    for i in (1, 2, 3, 4, 5, 7, 8, 10):
        tables.append(
            flex.reflection_table.from_file(
                data_dir / f"reflections_{i}.pickle"))
        expts.extend(
            load.experiment_list(
                data_dir / f"experiments_{i}.json",
                check_format=False,
            ))
    # first make sure ids are set up correctly.
    experiments, reflections = assign_unique_identifiers(expts, tables)
    reflections = update_imageset_ids(experiments, reflections)
    joint_reflections = flex.reflection_table()
    for table in reflections:
        joint_reflections.extend(table)
    # check that there are 8 unique id and imageset_ids, and that these
    # correctly correspond to each experiment
    assert len(set(joint_reflections["id"])) == 8
    assert len(set(joint_reflections["imageset_id"])) == 8
    for id_ in range(8):
        sel = joint_reflections["id"] == id_
        assert set(joint_reflections["imageset_id"].select(sel)) == {id_}
예제 #21
0
def test_targeted_scaling_against_mtz(dials_data, tmpdir):
    """Test targeted scaling against an mtz generated with dials.scale."""
    location = dials_data("l_cysteine_4_sweeps_scaled")

    # First scale one sweep, exporting an unmerged mtz to use as the target.
    result = procrunner.run(
        [
            "dials.scale",
            location.join("scaled_35.refl").strpath,
            location.join("scaled_35.expt").strpath,
            "unmerged_mtz=unmerged.mtz",
        ],
        working_directory=tmpdir,
    )
    assert not result.returncode and not result.stderr
    assert tmpdir.join("scaled.expt").check()
    assert tmpdir.join("scaled.refl").check()
    assert tmpdir.join("unmerged.mtz").check()

    # Then scale a second sweep against that mtz target.
    result = procrunner.run(
        [
            "dials.scale",
            location.join("scaled_30.refl").strpath,
            location.join("scaled_30.expt").strpath,
            "target_mtz=%s" % tmpdir.join("unmerged.mtz").strpath,
        ],
        working_directory=tmpdir,
    )
    assert not result.returncode and not result.stderr
    assert tmpdir.join("scaled.expt").check()
    assert tmpdir.join("scaled.refl").check()
    expts = load.experiment_list(tmpdir.join("scaled.expt").strpath,
                                 check_format=False)
    assert len(expts) == 1
예제 #22
0
def test_multi_lattice(dials_regression, tmpdir):
    """Integrate a two-lattice data set and check that both lattices survive
    with the custom identifiers assigned before integration."""
    data_dir = os.path.join(
        dials_regression, "integration_test_data", "multi_lattice"
    )

    experiments = load.experiment_list(
        os.path.join(data_dir, "experiments.json"))
    for i, expt in enumerate(experiments):
        expt.identifier = str(100 + i)
    experiments.as_json(tmpdir.join("modified_input.json").strpath)

    result = procrunner.run(
        [
            "dials.integrate",
            "nproc=1",
            "modified_input.json",
            os.path.join(data_dir, "indexed.pickle"),
            "prediction.padding=0",
        ],
        working_directory=tmpdir,
    )
    assert not result.returncode and not result.stderr
    assert tmpdir.join("integrated.refl").check()
    assert tmpdir.join("integrated.expt").check()

    integrated = load.experiment_list(tmpdir.join("integrated.expt").strpath)
    for i, expt in enumerate(integrated):
        assert expt.identifier == str(100 + i)

    table = flex.reflection_table.from_file(tmpdir / "integrated.refl")
    assert len(table) == 5605
    assert dict(table.experiment_identifiers()) == {0: "100", 1: "101"}

    # Check output contains from two lattices
    assert len(set(table["id"])) == 2

    # Check both lattices have integrated reflections
    prf_only = table.select(table.get_flags(table.flags.integrated_prf))
    assert len(set(prf_only["id"])) == 2
예제 #23
0
def test_mtz_multi_wavelength(dials_data, tmp_path):
    """Test multi-wavelength mtz export"""
    # First make suitable input - multi datasets experiment list and reflection
    # table with different wavelengths
    mcp = dials_data("multi_crystal_proteinase_k", pathlib=True)
    expts_a = load.experiment_list(mcp / "experiments_1.json", check_format=False)
    expts_b = load.experiment_list(mcp / "experiments_2.json", check_format=False)
    tables = [
        flex.reflection_table.from_file(mcp / "reflections_1.pickle"),
        flex.reflection_table.from_file(mcp / "reflections_2.pickle"),
    ]

    expts_a[0].beam.set_wavelength(0.5)
    expts_b[0].beam.set_wavelength(1.0)
    expts_a.extend(expts_b)

    exps, refls = assign_unique_identifiers(expts_a, tables)
    joint_refl = flex.reflection_table()
    for table in refls:
        joint_refl.extend(table)
    exps.as_json(tmp_path / "tmp_exp.expt")
    joint_refl.as_file(tmp_path / "tmp_refl.refl")

    # Now run
    result = procrunner.run(
        [
            "dials.export",
            tmp_path / "tmp_exp.expt",
            tmp_path / "tmp_refl.refl",
            "format=mtz",
            "mtz.hklout=unmerged.mtz",
        ],
        environment_override={"DIALS_EXPORT_DO_NOT_CHECK_FORMAT": "True"},
        working_directory=tmp_path,
    )
    assert not result.returncode and not result.stderr
    assert (tmp_path / "unmerged.mtz").is_file()

    # Inspect output
    n_batches = []
    wavelengths = []
    for crystal in mtz.object(str(tmp_path / "unmerged.mtz")).crystals():
        for dataset in crystal.datasets():
            wavelengths.append(dataset.wavelength())
            n_batches.append(dataset.n_batches())
    assert n_batches == [0, 25, 25]  # base, dataset1, dataset2
    assert wavelengths == [0, 0.5, 1.0]  # base, dataset1, dataset2
예제 #24
0
def test_assign_identifiers(dials_data, run_in_tmpdir):
    """Test for dials.assign_experiment_identifiers"""
    pickle_path_list = []
    sequence_path_list = []
    data_dir = dials_data("l_cysteine_dials_output")
    for i in [20, 25]:
        # Build the filename explicitly instead of relying on py.path.local's
        # basename-appending ``+`` operator (`(dir / "20") + suffix`), which
        # is fragile and unsupported by pathlib.Path.
        pickle_path_list.append(data_dir / f"{i}_integrated.pickle")
        sequence_path_list.append(data_dir / f"{i}_integrated_experiments.json")

    run_assign_identifiers(pickle_path_list, sequence_path_list, extra_args=[])

    # Fresh data: identifiers should be assigned (non-empty) and consistent
    # between the reflection table and the experiment list.
    r = flex.reflection_table.from_file("assigned.refl")
    e = load.experiment_list("assigned.expt", check_format=False)
    r.assert_experiment_identifiers_are_consistent(e)
    assert list(r.experiment_identifiers().values()) != ["", ""]
    assert list(r.experiment_identifiers().keys()) == [0, 1]
    assert list(e.identifiers()) == list(r.experiment_identifiers().values())

    # now run again, with already assigned data
    pickle_path_list = ["assigned.refl"]
    sequence_path_list = ["assigned.expt"]
    run_assign_identifiers(pickle_path_list, sequence_path_list, extra_args=[])

    r = flex.reflection_table.from_file("assigned.refl")
    e = load.experiment_list("assigned.expt", check_format=False)
    r.assert_experiment_identifiers_are_consistent(e)
    assert list(r.experiment_identifiers().values()) != ["", ""]
    assert list(r.experiment_identifiers().keys()) == [0, 1]
    assert list(e.identifiers()) == list(r.experiment_identifiers().values())

    # now run again, with adding more data
    pickle_path_list = ["assigned.refl"]
    sequence_path_list = ["assigned.expt"]
    for i in [30, 35]:
        pickle_path_list.append(data_dir / f"{i}_integrated.pickle")
        sequence_path_list.append(data_dir / f"{i}_integrated_experiments.json")

    run_assign_identifiers(
        pickle_path_list, sequence_path_list, extra_args=["identifiers=0 5 10 15"]
    )

    # Explicitly requested identifiers should be applied, in order.
    r = flex.reflection_table.from_file("assigned.refl")
    e = load.experiment_list("assigned.expt", check_format=False)
    r.assert_experiment_identifiers_are_consistent(e)
    assert list(r.experiment_identifiers().values()) == ["0", "5", "10", "15"]
    assert list(r.experiment_identifiers().keys()) == [0, 1, 2, 3]
    assert list(e.identifiers()) == ["0", "5", "10", "15"]
예제 #25
0
def test_fast_slow_beam_centre(dials_regression, run_in_tmpdir):
    # test slow_fast_beam_centre with a multi-panel CS-PAD image
    from scitbx import matrix

    def panel_offsets(detector):
        # distance of every panel origin from panel 0's origin
        origin0 = matrix.col(detector[0].get_origin())
        return [(origin0 - matrix.col(p.get_origin())).length() for p in detector]

    impath = os.path.join(
        dials_regression,
        "image_examples",
        "LCLS_cspad_nexus",
        "idx-20130301060858401.cbf",
    )
    result = procrunner.run(
        [
            "dials.import",
            "fast_slow_beam_centre=42,134,18",
            "output.experiments=fast_slow_beam_centre.expt",
            impath,
        ]
    )
    assert not result.returncode and not result.stderr
    assert os.path.exists("fast_slow_beam_centre.expt")

    experiments = load.experiment_list("fast_slow_beam_centre.expt")
    imgset = experiments[0].imageset
    assert experiments[0].identifier != ""
    # beam centre on 18th panel
    detector = imgset.get_detector()
    beam_centre = detector[18].get_beam_centre_px(imgset.get_beam().get_s0())
    assert beam_centre == pytest.approx((42, 134))

    # check relative panel positions have not changed
    offsets = panel_offsets(detector)

    result = procrunner.run(
        ["dials.import", "output.experiments=reference.expt", impath]
    )
    assert not result.returncode and not result.stderr
    assert os.path.exists("reference.expt")

    ref_exp = load.experiment_list("reference.expt")
    ref_offsets = panel_offsets(ref_exp[0].imageset.get_detector())
    assert offsets == pytest.approx(ref_offsets)
예제 #26
0
    def _integrate_prepare(self):
        """Prepare for integration - in XDS terms this may mean rerunning
        IDXREF to get the XPARM etc. DEFPIX is considered part of the full
        integration as it is resolution dependent."""

        Citations.cite("dials")

        # decide what images we are going to process, if not already
        # specified
        if not self._intgr_wedge:
            images = self.get_matching_images()
            self.set_integrater_wedge(min(images), max(images))

        logger.debug("DIALS INTEGRATE PREPARE:")
        logger.debug("Wavelength: %.6f" % self.get_wavelength())
        logger.debug("Distance: %.2f" % self.get_distance())

        # Fall back to the indexer's low-resolution estimate when no explicit
        # low-resolution limit has been set on this integrater.
        if not self.get_integrater_low_resolution():

            dmax = self._intgr_refiner.get_indexer_low_resolution(
                self.get_integrater_epoch()
            )
            self.set_integrater_low_resolution(dmax)

            logger.debug(
                "Low resolution set to: %s" % self.get_integrater_low_resolution()
            )

        ## copy the data across
        refiner = self.get_integrater_refiner()
        # For multi-sweep refinement, get the split experiments from after refinement.
        if PhilIndex.params.xia2.settings.multi_sweep_refinement:
            # Payloads are keyed by sweep name when sweeps were refined jointly
            # and then split back out.
            self._intgr_experiments_filename = refiner.get_refiner_payload(
                f"{self._intgr_sweep._name}_models.expt"
            )
            self._intgr_indexed_filename = refiner.get_refiner_payload(
                f"{self._intgr_sweep._name}_observations.refl"
            )
        # Otherwise, there should only be a single experiment list and reflection table.
        else:
            self._intgr_experiments_filename = refiner.get_refiner_payload(
                "models.expt"
            )
            self._intgr_indexed_filename = refiner.get_refiner_payload(
                "observations.refl"
            )
        experiments = load.experiment_list(self._intgr_experiments_filename)
        experiment = experiments[0]

        # this is the result of the cell refinement
        self._intgr_cell = experiment.crystal.get_unit_cell().parameters()

        logger.debug("Files available at the end of DIALS integrate prepare:")
        for f in self._data_files:
            logger.debug("%s" % f)

        # Propagate the refined experimental models onto this integrater.
        self.set_detector(experiment.detector)
        self.set_beam_obj(experiment.beam)
        self.set_goniometer(experiment.goniometer)
예제 #27
0
def centroid_test_data(dials_data):
    """Load the centroid test data's strong spots, map them to reciprocal
    space and return (experiments, reflections)."""
    base = dials_data("centroid_test_data")
    reflections = flex.reflection_table.from_file(
        base.join("strong.pickle").strpath
    )
    experiments = load.experiment_list(
        base.join("imported_experiments.json")
    )
    reflections.centroid_px_to_mm(experiments)
    reflections.map_centroids_to_reciprocal_space(experiments)
    return experiments, reflections
예제 #28
0
 def refine(self):
     """Refine in the correct Bravais setting, then reload the refined
     experiments and reflections into the data manager."""
     refined = self._dials_refine(
         self._experiments_filename, self._reflections_filename
     )
     self._experiments_filename, self._reflections_filename = refined
     self._data_manager.experiments = load.experiment_list(
         self._experiments_filename, check_format=False
     )
     self._data_manager.reflections = flex.reflection_table.from_file(
         self._reflections_filename
     )
예제 #29
0
def centroid_test_data(dials_data):
    """Return a dict with the integrated reflections and experiments from
    the centroid_test_data set."""
    base = dials_data("centroid_test_data")
    experiments = load.experiment_list(
        base.join("experiments.json").strpath,
        check_format=False,
    )
    reflections = flex.reflection_table.from_file(
        base.join("integrated.pickle").strpath
    )
    return {"reflections": reflections, "experiments": experiments}
예제 #30
0
def test_scale_set_absorption_level(dials_data, tmp_path):
    """Test that the absorption parameters are correctly set for the absorption option."""
    location = dials_data("l_cysteine_dials_output", pathlib=True)
    refl = location / "20_integrated.pickle"
    expt = location / "20_integrated_experiments.json"

    # Scale with absorption_level=medium and check that the expected
    # spherical-harmonic absorption settings end up in the scaling model.
    command = [
        "dials.scale",
        refl,
        expt,
        "absorption_level=medium",
        "unmerged_mtz=unmerged.mtz",
    ]
    result = procrunner.run(command, working_directory=tmp_path)
    assert not result.returncode and not result.stderr
    assert (tmp_path / "scaled.refl").is_file()
    assert (tmp_path / "scaled.expt").is_file()
    expts = load.experiment_list(tmp_path / "scaled.expt", check_format=False)
    # absorption_level=medium corresponds to lmax=6, surface weight 5e4.
    assert expts[0].scaling_model.configdict["lmax"] == 6
    assert expts[0].scaling_model.configdict["abs_surface_weight"] == 5e4
    abs_params = expts[0].scaling_model.components["absorption"].parameters
    # Sanity-check the merging statistics of the scaled output.
    result = get_merging_stats(tmp_path / "unmerged.mtz")
    assert result.overall.r_pim < 0.024
    assert result.overall.cc_one_half > 0.995
    assert result.overall.n_obs > 2300

    # now scale again with different options, but fix the absorption surface to
    # test the correction.fix option.
    command = [
        "dials.scale",
        tmp_path / "scaled.refl",
        tmp_path / "scaled.expt",
        "error_model=None",
        "physical.correction.fix=absorption",
    ]
    result = procrunner.run(command, working_directory=tmp_path)
    assert not result.returncode and not result.stderr
    assert (tmp_path / "scaled.refl").is_file()
    assert (tmp_path / "scaled.expt").is_file()
    expts = load.experiment_list(tmp_path / "scaled.expt", check_format=False)
    new_abs_params = expts[0].scaling_model.components["absorption"].parameters
    # Fixed component: parameters must be unchanged by the second scaling run.
    assert abs_params == new_abs_params
예제 #31
0
def run(file_names):
  """Extract a sweep from a single .json input file.

  Accepts either a datablock json or (as a fallback) an experiment list
  json; does nothing for any other argument list.
  """
  if len(file_names) == 1 and file_names[0].endswith('json'):
    from dxtbx.serialize import load
    try:
      datablock = load.datablock(file_names[0])
      assert len(datablock) == 1
      sweep = datablock[0].extract_sweeps()[0]
    # Bug fix: ``except ValueError, e`` is Python-2-only syntax and a
    # SyntaxError under Python 3; ``as`` works on both.
    except ValueError as e:
      # The specific error message indicates an experiment-list file was
      # supplied instead of a datablock; fall back to that reader.
      if str(e) == '"__id__" does not equal "imageset"':
        experiments = load.experiment_list(file_names[0])
        assert len(experiments) == 1
        sweep = experiments[0].imageset
      else:
        raise
예제 #32
0
파일: strategy.py 프로젝트: xia2/xia2
    def process_one_strategy(args):
      """Run BEST for one (experiments, reflections, strategy, t_ref) tuple
      and return the resulting strategy-results dict.

      NOTE(review): relies on ``sweep``, ``wd`` and ``auto_logfiler`` from
      the enclosing scope -- confirm they are in scope where this closure
      is defined.
      """
      assert len(args) == 4
      experiments, reflections, strategy, t_ref = args
      from xia2.Wrappers.EMBL import Best
      best = Best.BestStrategy()
      # Export each sweep in BEST format, accumulating the .hkl files.
      for isweep, (expt, refl) in enumerate(zip(experiments, reflections)):
        integrater = sweep._get_integrater()
        from xia2.Wrappers.Dials.ExportBest import ExportBest
        export = ExportBest()
        export.set_experiments_filename(expt)
        export.set_reflections_filename(refl)
        export.set_working_directory(wd)
        auto_logfiler(export)
        prefix = '%i_best' %export.get_xpid()
        export.set_prefix(prefix)
        export.run()
        if isweep == 0:
          # The .dat/.par files from the first sweep parameterise BEST.
          imageset = sweep.get_imageset()
          scan = imageset.get_scan()
          best.set_t_ref(t_ref)
          best.set_mos_dat('%s.dat' %prefix)
          best.set_mos_par('%s.par' %prefix)
        best.add_mos_hkl('%s.hkl' %prefix)
      # Apply the requested strategy constraints.
      best.set_i2s(strategy.i_over_sigi)
      best.set_T_max(strategy.max_total_exposure)
      best.set_t_min(strategy.min_exposure)
      #best.set_trans_ref(25.0)
      best.set_S_max(strategy.max_rotation_speed)
      best.set_w_min(strategy.min_oscillation_width)
      best.set_M_min(strategy.multiplicity)
      best.set_C_min(strategy.completeness)
      best.set_anomalous(strategy.anomalous)

      best.set_detector('pilatus6m')
      best.set_working_directory(wd)
      auto_logfiler(best)
      xmlout = '%s/%i_best.xml' %(best.get_working_directory(), best.get_xpid())
      best.set_xmlout(xmlout)
      best.strategy()

      results = best.get_results_dict()
      results['description'] = strategy.description
      # Derive phi_end when BEST does not report it directly.
      if 'phi_end' not in results:
        results['phi_end'] = str(
          float(results['phi_start']) +
          float(results['number_of_images']) * float(results['phi_width']))
      from dxtbx.serialize import load
      expt = load.experiment_list(experiments[0])[0]
      results['spacegroup'] = expt.crystal.get_space_group().type().lookup_symbol()
      return results
예제 #33
0
def exercise_refine_bravais_settings():
  if not have_dials_regression:
    print "Skipping exercise_refine_bravais_settings(): dials_regression not available."
    return

  data_dir = os.path.join(dials_regression, "indexing_test_data", "i04_weak_data")
  pickle_path = os.path.join(data_dir, "indexed.pickle")
  experiments_path = os.path.join(data_dir, "experiments.json")
  commands = ["dials.refine_bravais_settings",
              pickle_path,
              experiments_path,
              "reflections_per_degree=5",
              "minimum_sample_size=500",
              "beam.fix=all",
              "detector.fix=all"]
  command = " ".join(commands)
  print command
  cwd = os.path.abspath(os.curdir)
  tmp_dir = open_tmp_directory()
  os.chdir(tmp_dir)
  result = easy_run.fully_buffered(command=command).raise_if_errors()
  for i in range(1, 10):
    assert os.path.exists("bravais_setting_%i.json" %i)
  from dxtbx.serialize import load
  experiments_list = load.experiment_list(
    "bravais_setting_9.json", check_format=False)
  assert len(experiments_list) == 1
  assert experiments_list[0].crystal.get_unit_cell().is_similar_to(
    uctbx.unit_cell((57.782, 57.782, 150.011, 90, 90, 90)))
  assert experiments_list[0].crystal.get_space_group().type().hall_symbol() \
         == ' P 4'

  assert os.path.exists("bravais_summary.json")
  from json import load
  bravais_summary = load(open("bravais_summary.json", "rb"))
  assert bravais_summary.keys() == [
    '1', '3', '2', '5', '4', '7', '6', '9', '8']
  bravais_summary['9'].keys() == [
    'bravais', 'max_angular_difference', 'unit_cell', 'rmsd', 'nspots']

  assert approx_equal(
    bravais_summary['9']['unit_cell'],
    [57.78, 57.78, 150.0, 90.0, 90.0, 90.0], eps=1e-1)
  assert bravais_summary['9']['bravais'] == 'tP'
  assert bravais_summary['9']['recommended'] == True
  assert approx_equal(bravais_summary['9']['rmsd'], 0.047, eps=1e-2)
  os.chdir(cwd)
예제 #34
0
파일: import.py 프로젝트: biochem-fan/dials
  def run(self):
    ''' Parse the options, configure logging and load any reference
    geometry specified on the command line. '''
    from dxtbx.datablock import DataBlockFactory
    from dxtbx.datablock import DataBlockTemplateImporter
    from dials.util.options import flatten_datablocks
    from dials.util import log
    from logging import info, debug
    import cPickle as pickle
    from libtbx.utils import Sorry

    # Parse the command line arguments
    params, options = self.parser.parse_args(show_diff_phil=False)
    datablocks = flatten_datablocks(params.input.datablock)

    # Configure logging
    log.config(
      params.verbosity,
      info=params.output.log,
      debug=params.output.debug_log)
    from dials.util.version import dials_version
    info(dials_version())

    # Log the diff phil
    diff_phil = self.parser.diff_phil.as_str()
    # Bug fix: this previously used ``is not ''`` (identity comparison),
    # which only works by accident of CPython string interning.
    if diff_phil:
      info('The following parameters have been modified:\n')
      info(diff_phil)

    # Load reference geometry
    reference_detector = None
    reference_beam = None
    if params.input.reference_geometry is not None:
      from dxtbx.serialize import load
      try:
        # Prefer the experiment-list format ...
        experiments = load.experiment_list(
          params.input.reference_geometry, check_format=False)
        assert len(experiments.detectors()) == 1
        assert len(experiments.beams()) == 1
        reference_detector = experiments.detectors()[0]
        reference_beam = experiments.beams()[0]
      except Exception:
        # ... falling back to the legacy datablock format. (Was the
        # Python-2-only ``except Exception, e``; the exception object
        # was never used.)
        datablock = load.datablock(params.input.reference_geometry)
        assert len(datablock) == 1
        imageset = datablock[0].extract_imagesets()[0]
        reference_detector = imageset.get_detector()
        reference_beam = imageset.get_beam()
예제 #35
0
파일: __init__.py 프로젝트: xia2/xia2
def load_reference_geometries(geometry_file_list):
  """Load the reference detector and beam model from each geometry file.

  Returns a list of dicts with keys 'detector', 'beam' and 'file'.
  """
  reference_components = []
  for file in geometry_file_list:
    from dxtbx.serialize import load  # deferred: only needed for a non-empty list
    try:
      # Prefer the experiment-list format ...
      experiments = load.experiment_list(file, check_format=False)
      assert len(experiments.detectors()) == 1
      assert len(experiments.beams()) == 1
      reference_detector = experiments.detectors()[0]
      reference_beam = experiments.beams()[0]
    except Exception:
      # ... falling back to the legacy datablock format. (Was the
      # Python-2-only ``except Exception, e``; ``e`` was never used.)
      datablock = load.datablock(file)
      assert len(datablock) == 1
      imageset = datablock[0].extract_imagesets()[0]
      reference_detector = imageset.get_detector()
      reference_beam = imageset.get_beam()
    reference_components.append({'detector': reference_detector, 'beam': reference_beam, 'file': file})
  # Bug fix: the accumulated list was previously built but never returned.
  return reference_components
예제 #36
0
def exercise_refine_bravais_settings_3():
  """Exercise dials.refine_bravais_settings on the trypsin data, refining
  only the lattice selected by crystal_id=1."""
  if not have_dials_regression:
    print "Skipping exercise_refine_bravais_settings(): dials_regression not available."
    return

  data_dir = os.path.join(dials_regression, "indexing_test_data", "trypsin")
  pickle_path = os.path.join(data_dir, "indexed.pickle")
  experiments_path = os.path.join(data_dir, "experiments.json")
  commands = ["dials.refine_bravais_settings",
              pickle_path,
              experiments_path,
              "crystal_id=1"]
  command = " ".join(commands)
  print command
  # Run in a scratch directory so output files do not pollute cwd.
  cwd = os.path.abspath(os.curdir)
  tmp_dir = open_tmp_directory()
  os.chdir(tmp_dir)
  result = easy_run.fully_buffered(command=command).raise_if_errors()
  # One experiment list per candidate bravais setting is expected.
  for i in range(1, 10):
    assert os.path.exists("bravais_setting_%i.json" %i)
  from dxtbx.serialize import load
  experiments_list = load.experiment_list(
    "bravais_setting_5.json", check_format=False)
  assert len(experiments_list) == 1
  assert experiments_list[0].crystal.get_unit_cell().is_similar_to(
    uctbx.unit_cell((54.37, 58.29, 66.51, 90.00, 90.00, 90.00)))
  assert experiments_list[0].crystal.get_space_group().type().hall_symbol() \
         == ' P 2 2'

  assert os.path.exists("bravais_summary.json")
  from json import load
  bravais_summary = load(open("bravais_summary.json", "rb"))
  # NOTE(review): relies on Python 2 dict key ordering -- fragile; a set
  # comparison would be more robust.
  assert bravais_summary.keys() == [
    '1', '3', '2', '5', '4', '7', '6', '9', '8']

  assert approx_equal(
    bravais_summary['5']['unit_cell'],
    [54.37, 58.29, 66.51, 90.00, 90.00, 90.00], eps=1e-1)
  assert bravais_summary['5']['bravais'] == 'oP'
  assert approx_equal(bravais_summary['5']['rmsd'], 0.1200, eps=1e-2)
  assert bravais_summary['5']['recommended'] == True
  assert bravais_summary['9']['recommended'] == False
  os.chdir(cwd)
예제 #37
0
def exercise_refine_bravais_settings_2():
  """Exercise dials.refine_bravais_settings on the multi-sweep indexing
  data (four sweeps sharing one crystal)."""
  if not have_dials_regression:
    print "Skipping exercise_refine_bravais_settings(): dials_regression not available."
    return

  data_dir = os.path.join(dials_regression, "indexing_test_data", "multi_sweep")
  pickle_path = os.path.join(data_dir, "indexed.pickle")
  experiments_path = os.path.join(data_dir, "experiments.json")
  commands = ["dials.refine_bravais_settings", pickle_path, experiments_path]
  command = " ".join(commands)
  print command
  # Run in a scratch directory so output files do not pollute cwd.
  cwd = os.path.abspath(os.curdir)
  tmp_dir = open_tmp_directory()
  os.chdir(tmp_dir)
  result = easy_run.fully_buffered(command=command).raise_if_errors()
  for i in range(1, 10):
    assert os.path.exists("bravais_setting_%i.json" %i)
  from dxtbx.serialize import load
  experiments_list = load.experiment_list(
    "bravais_setting_9.json", check_format=False)
  # All four sweeps should share a single crystal model.
  assert len(experiments_list) == 4
  assert len(experiments_list.crystals()) == 1
  assert experiments_list[0].crystal.get_unit_cell().is_similar_to(
    uctbx.unit_cell((7.31, 7.31, 6.82, 90.00, 90.00, 90.00)))
  assert experiments_list[0].crystal.get_space_group().type().hall_symbol() \
         == ' I 4'
  assert os.path.exists("bravais_summary.json")
  from json import load
  bravais_summary = load(open("bravais_summary.json", "rb"))
  # Summary should contain entries for every candidate setting.
  for i in range(1, 23): assert str(i) in bravais_summary.keys()

  assert approx_equal(
    bravais_summary['9']['unit_cell'],
    [7.31, 7.31, 6.82, 90.00, 90.00, 90.00], eps=1e-1)
  assert bravais_summary['9']['bravais'] == 'tI'
  assert approx_equal(bravais_summary['9']['rmsd'], 0.103, eps=1e-2)
  assert bravais_summary['9']['recommended'] == True
  os.chdir(cwd)
예제 #38
0
def run(args):
  """Command-line entry point: parse datablocks from *args* and load any
  reference detector geometry supplied via the ``reference=`` parameter."""
  from dials.util.options import OptionParser
  from dials.util.options import flatten_datablocks
  import libtbx.load_env

  usage = "%s [options] datablock.json reference=reference_datablock.json" %(
    libtbx.env.dispatcher_name)

  parser = OptionParser(
    usage=usage,
    phil=phil_scope,
    read_datablocks=True,
    check_format=False,
    epilog=help_message)

  params, options = parser.parse_args(show_diff_phil=True)
  datablocks = flatten_datablocks(params.input.datablock)

  # Nothing to do without an input datablock.
  if len(datablocks) == 0:
    parser.print_help()
    exit()

  # Load reference geometry
  reference_detector = None
  if params.input.reference is not None:
    from dxtbx.serialize import load
    try:
      # Prefer the experiment-list format ...
      reference_experiments = load.experiment_list(
        params.input.reference, check_format=False)
      assert len(reference_experiments.detectors()) == 1
      reference_detector = reference_experiments.detectors()[0]
    except Exception:
      # ... falling back to the legacy datablock format. (Was the
      # Python-2-only ``except Exception, e``; ``e`` was never used.)
      reference_datablocks = load.datablock(params.input.reference)
      assert len(reference_datablocks) == 1
      imageset = reference_datablocks[0].extract_imagesets()[0]
      reference_detector = imageset.get_detector()
예제 #39
0
파일: dials_import.py 프로젝트: dials/dials
  def load_reference_geometry(self, params):
    '''
    Load a reference geometry file, returning its detector and beam models.

    '''
    # NOTE(review): namedtuple appears unused in this snippet -- retained
    # in case later (unseen) code in this method relies on it.
    from collections import namedtuple
    # Load reference geometry
    reference_detector = None
    reference_beam = None
    if params.input.reference_geometry is not None:
      from dxtbx.serialize import load
      try:
        # Prefer the experiment-list format ...
        experiments = load.experiment_list(
          params.input.reference_geometry, check_format=False)
        assert len(experiments.detectors()) == 1
        assert len(experiments.beams()) == 1
        reference_detector = experiments.detectors()[0]
        reference_beam = experiments.beams()[0]
      except Exception:
        # ... falling back to the legacy datablock format. (Was the
        # Python-2-only ``except Exception, e``; ``e`` was never used.)
        datablock = load.datablock(params.input.reference_geometry)
        assert len(datablock) == 1
        imageset = datablock[0].extract_imagesets()[0]
        reference_detector = imageset.get_detector()
        reference_beam = imageset.get_beam()
예제 #40
0
def load_input(exp_path, ref_path):
  """Load a reflection table from *ref_path* and the first experiment from
  *exp_path*, returning them as a (reflections, experiment) pair."""
  reflections = load_dials.reflections(ref_path)
  experiment = load_dxtbx.experiment_list(exp_path , check_format=False)[0]
  return reflections, experiment
예제 #41
0
    dials_regression = libtbx.env.dist_path('dials_regression')
  except KeyError, e:
    print 'FAIL: dials_regression not configured'
    exit(0)

  orig_expt_json = os.path.join(
    dials_regression, "experiment_test_data/kappa_experiments.json")

  new_expt_json = os.path.join(os.getcwd(), 'modified_experiments.json')

  cmd = "dials.modify_geometry %s angles=10,20,30" %orig_expt_json
  result = easy_run.fully_buffered(cmd).raise_if_errors()

  from dxtbx.serialize import load
  assert os.path.exists(orig_expt_json), orig_expt_json
  orig_expt = load.experiment_list(orig_expt_json, check_format=False)
  assert os.path.exists(new_expt_json), new_expt_json
  new_expt = load.experiment_list(new_expt_json, check_format=False)

  orig_gonio = orig_expt.goniometers()[0]
  new_gonio = new_expt.goniometers()[0]
  assert approx_equal(orig_gonio.get_angles(), [0,180,0])
  assert approx_equal(new_gonio.get_angles(), [10,20,30])

  return


if __name__ == '__main__':
  from dials.test import cd_auto
  # Run the test from the directory containing this file.
  with cd_auto(__file__):
    run()
예제 #42
0
def run(args):
    """dials.reindex entry point: apply a change-of-basis operator (given
    explicitly, derived from a reference crystal, or derived from indexed
    reflections) to the input experiments and/or reflections."""
    import libtbx.load_env
    from libtbx.utils import Sorry

    usage = "%s [options] experiments.json indexed.pickle" % libtbx.env.dispatcher_name

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_reflections=True,
        read_experiments=True,
        check_format=False,
        epilog=help_message,
    )

    params, options = parser.parse_args(show_diff_phil=True)

    reflections = flatten_reflections(params.input.reflections)
    experiments = flatten_experiments(params.input.experiments)
    if len(experiments) == 0 and len(reflections) == 0:
        parser.print_help()
        return
    elif len(experiments.crystals()) > 1:
        raise Sorry("Only one crystal can be processed at a time")
    if params.change_of_basis_op is None:
        raise Sorry("Please provide a change_of_basis_op.")

    # Optionally load a reference crystal against which to derive the
    # change-of-basis operator.
    reference_crystal = None
    if params.reference is not None:
        from dxtbx.serialize import load

        reference_experiments = load.experiment_list(params.reference, check_format=False)
        assert len(reference_experiments.crystals()) == 1
        reference_crystal = reference_experiments.crystals()[0]

    # change_of_basis_op=Auto: derive the operator rather than parse it.
    if len(experiments) and params.change_of_basis_op is libtbx.Auto:
        if reference_crystal is not None:
            # Derive the operator from the rotation relating the input
            # crystal to the reference crystal.
            from dials.algorithms.indexing.compare_orientation_matrices import (
                difference_rotation_matrix_and_euler_angles,
            )

            cryst = experiments.crystals()[0]
            R, euler_angles, change_of_basis_op = difference_rotation_matrix_and_euler_angles(cryst, reference_crystal)
            print "Change of basis op: %s" % change_of_basis_op
            print "Rotation matrix to transform input crystal to reference::"
            print R.mathematica_form(format="%.3f", one_row_per_line=True)
            print "Euler angles (xyz): %.2f, %.2f, %.2f" % euler_angles

        elif len(reflections):
            assert len(reflections) == 1

            # always re-map reflections to reciprocal space
            from dials.algorithms.indexing import indexer

            refl_copy = flex.reflection_table()
            for i, imageset in enumerate(experiments.imagesets()):
                if "imageset_id" in reflections[0]:
                    sel = reflections[0]["imageset_id"] == i
                else:
                    sel = reflections[0]["id"] == i
                refl = indexer.indexer_base.map_spots_pixel_to_mm_rad(
                    reflections[0].select(sel), imageset.get_detector(), imageset.get_scan()
                )

                indexer.indexer_base.map_centroids_to_reciprocal_space(
                    refl, imageset.get_detector(), imageset.get_beam(), imageset.get_goniometer()
                )
                refl_copy.extend(refl)

            # index the reflection list using the input experiments list
            refl_copy["id"] = flex.int(len(refl_copy), -1)
            from dials.algorithms.indexing import index_reflections

            index_reflections(refl_copy, experiments, tolerance=0.2)
            hkl_expt = refl_copy["miller_index"]
            hkl_input = reflections[0]["miller_index"]

            # Operator mapping the originally-indexed hkl to the re-indexed hkl.
            change_of_basis_op = derive_change_of_basis_op(hkl_input, hkl_expt)

            # reset experiments list since we don't want to reindex this
            experiments = []

    else:
        # An explicit operator string was supplied; parse it.
        change_of_basis_op = sgtbx.change_of_basis_op(params.change_of_basis_op)

    if len(experiments):
        # Apply the operator to the (single) crystal model.
        experiment = experiments[0]
        cryst_orig = copy.deepcopy(experiment.crystal)
        cryst_reindexed = cryst_orig.change_basis(change_of_basis_op)
        if params.space_group is not None:
            # Override the space group while keeping the reindexed basis.
            a, b, c = cryst_reindexed.get_real_space_vectors()
            cryst_reindexed = crystal_model(a, b, c, space_group=params.space_group.group())
        experiment.crystal.update(cryst_reindexed)

        print "Old crystal:"
        print cryst_orig
        print
        print "New crystal:"
        print cryst_reindexed
        print

        print "Saving reindexed experimental models to %s" % params.output.experiments
        dump.experiment_list(experiments, params.output.experiments)

    if len(reflections):
        assert len(reflections) == 1
        reflections = reflections[0]

        miller_indices = reflections["miller_index"]

        # Optionally apply an additive hkl offset before reindexing.
        if params.hkl_offset is not None:
            h, k, l = miller_indices.as_vec3_double().parts()
            h += params.hkl_offset[0]
            k += params.hkl_offset[1]
            l += params.hkl_offset[2]
            miller_indices = flex.miller_index(h.iround(), k.iround(), l.iround())
        # Reflections whose indices would become non-integral under the
        # operator are flagged and zeroed out rather than reindexed.
        non_integral_indices = change_of_basis_op.apply_results_in_non_integral_indices(miller_indices)
        if non_integral_indices.size() > 0:
            print "Removing %i/%i reflections (change of basis results in non-integral indices)" % (
                non_integral_indices.size(),
                miller_indices.size(),
            )
        sel = flex.bool(miller_indices.size(), True)
        sel.set_selected(non_integral_indices, False)
        miller_indices_reindexed = change_of_basis_op.apply(miller_indices.select(sel))
        reflections["miller_index"].set_selected(sel, miller_indices_reindexed)
        reflections["miller_index"].set_selected(~sel, (0, 0, 0))

        print "Saving reindexed reflections to %s" % params.output.reflections
        easy_pickle.dump(params.output.reflections, reflections)
예제 #43
0
파일: DialsRefiner.py 프로젝트: hainm/xia2
  def _refine(self):

    for epoch, idxr in self._refinr_indexers.iteritems():
      # decide what images we are going to process, if not already
      # specified
      #if not self._intgr_wedge:
        #images = self.get_matching_images()
        #self.set_integrater_wedge(min(images),
                    #max(images))

      #Debug.write('DIALS INTEGRATE PREPARE:')
      #Debug.write('Wavelength: %.6f' % self.get_wavelength())
      #Debug.write('Distance: %.2f' % self.get_distance())

      #if not self._intgr_indexer:
        #self.set_integrater_indexer(DialsIndexer())
        #self.get_integrater_indexer().set_indexer_sweep(
        #self.get_integrater_sweep())

        #self._intgr_indexer.set_working_directory(
        #self.get_working_directory())

        #self._intgr_indexer.setup_from_imageset(self.get_imageset())

        #if self.get_frame_wedge():
        #wedge = self.get_frame_wedge()
        #Debug.write('Propogating wedge limit: %d %d' % wedge)
        #self._intgr_indexer.set_frame_wedge(wedge[0], wedge[1],
                          #apply_offset = False)

        ## this needs to be set up from the contents of the
        ## Integrater frame processer - wavelength &c.

        #if self.get_beam_centre():
        #self._intgr_indexer.set_beam_centre(self.get_beam_centre())

        #if self.get_distance():
        #self._intgr_indexer.set_distance(self.get_distance())

        #if self.get_wavelength():
        #self._intgr_indexer.set_wavelength(
          #self.get_wavelength())

      # get the unit cell from this indexer to initiate processing
      # if it is new... and also copy out all of the information for
      # the Dials indexer if not...

      experiments = idxr.get_indexer_experiment_list()

      indexed_experiments = idxr.get_indexer_payload("experiments_filename")
      indexed_reflections = idxr.get_indexer_payload("indexed_filename")

      if len(experiments) > 1:
        xsweeps = idxr._indxr_sweeps
        assert len(xsweeps) == len(experiments)
        assert len(self._refinr_sweeps) == 1 # don't currently support joint refinement
        xsweep = self._refinr_sweeps[0]
        i = xsweeps.index(xsweep)
        experiments = experiments[i:i+1]

        # Extract and output experiment and reflections for current sweep
        indexed_experiments = os.path.join(
          self.get_working_directory(),
          "%s_indexed_experiments.json" %xsweep.get_name())
        indexed_reflections = os.path.join(
          self.get_working_directory(),
          "%s_indexed_reflections.pickle" %xsweep.get_name())

        from dxtbx.serialize import dump
        dump.experiment_list(experiments, indexed_experiments)

        from libtbx import easy_pickle
        from scitbx.array_family import flex
        reflections = easy_pickle.load(
          idxr.get_indexer_payload("indexed_filename"))
        sel = reflections['id'] == i
        assert sel.count(True) > 0
        imageset_id = reflections['imageset_id'].select(sel)
        assert imageset_id.all_eq(imageset_id[0])
        sel = reflections['imageset_id'] == imageset_id[0]
        reflections = reflections.select(sel)
        # set indexed reflections to id == 0 and imageset_id == 0
        reflections['id'].set_selected(reflections['id'] == i, 0)
        reflections['imageset_id'] = flex.int(len(reflections), 0)
        easy_pickle.dump(indexed_reflections, reflections)

      assert len(experiments.crystals()) == 1 # currently only handle one lattice/sweep
      crystal_model = experiments.crystals()[0]
      lattice = idxr.get_indexer_lattice()

      # check if the lattice was user assigned...
      user_assigned = idxr.get_indexer_user_input_lattice()

      # XXX check that the indexer is an Dials indexer - if not then
      # create one...

      # set a low resolution limit (which isn't really used...)
      # this should perhaps be done more intelligently from an
      # analysis of the spot list or something...?

      #if not self.get_integrater_low_resolution():

        #dmax = idxr.get_indexer_low_resolution()
        #self.set_integrater_low_resolution(dmax)

        #Debug.write('Low resolution set to: %s' % \
              #self.get_integrater_low_resolution())

      ## copy the data across
      from dxtbx.serialize import load, dump

      refiner = self.Refine()
      refiner.set_experiments_filename(indexed_experiments)
      refiner.set_indexed_filename(indexed_reflections)

      # XXX Temporary workaround for dials.refine error for scan_varying
      # refinement with smaller wedges
      total_phi_range = idxr._indxr_imagesets[0].get_scan().get_oscillation_range()[1]
      if total_phi_range < 5: # arbitrary value
        refiner.set_scan_varying(False)
      elif total_phi_range < 36:
        refiner.set_interval_width_degrees(total_phi_range/2)

      FileHandler.record_log_file('%s REFINE' % idxr.get_indexer_full_name(),
                                  refiner.get_log_file())
      refiner.run()
      self._refinr_experiments_filename \
        = refiner.get_refined_experiments_filename()
      experiments = load.experiment_list(self._refinr_experiments_filename)
      self._refinr_indexed_filename = refiner.get_refined_filename()
      self.set_refiner_payload("experiments.json", self._refinr_experiments_filename)
      self.set_refiner_payload("reflections.pickle", self._refinr_indexed_filename)

      # this is the result of the cell refinement
      self._refinr_cell = experiments.crystals()[0].get_unit_cell().parameters()
예제 #44
0
  def _integrate_prepare(self):
    '''Prepare for integration - in XDS terms this may mean rerunning
    IDXREF to get the XPARM etc. DEFPIX is considered part of the full
    integration as it is resolution dependent.

    Pulls the refined experiments/reflections from the refiner payload,
    records the refined unit cell, and propagates the refined detector,
    beam and goniometer models onto this integrater.'''

    from xia2.Handlers.Citations import Citations
    Citations.cite('dials')

    # decide what images we are going to process, if not already
    # specified
    if not self._intgr_wedge:
      images = self.get_matching_images()
      self.set_integrater_wedge(min(images),
                                max(images))

    Debug.write('DIALS INTEGRATE PREPARE:')
    Debug.write('Wavelength: %.6f' % self.get_wavelength())
    Debug.write('Distance: %.2f' % self.get_distance())

    # set a low resolution limit (which isn't really used...)
    # this should perhaps be done more intelligently from an
    # analysis of the spot list or something...?
    if not self.get_integrater_low_resolution():

      dmax = self._intgr_refiner.get_indexer_low_resolution(
        self.get_integrater_epoch())
      self.set_integrater_low_resolution(dmax)

      Debug.write('Low resolution set to: %s' % \
                  self.get_integrater_low_resolution())

    # copy the refined data across from the refiner payload
    from dxtbx.serialize import load

    refiner = self.get_integrater_refiner()
    self._intgr_experiments_filename = refiner.get_refiner_payload(
      "experiments.json")
    experiments = load.experiment_list(self._intgr_experiments_filename)
    experiment = experiments[0]
    self._intgr_indexed_filename = refiner.get_refiner_payload(
      "reflections.pickle")

    # this is the result of the cell refinement
    self._intgr_cell = experiment.crystal.get_unit_cell().parameters()

    Debug.write('Files available at the end of DIALS integrate prepare:')
    for f in self._data_files:
      Debug.write('%s' % f)

    # propagate the refined experimental models onto this integrater
    self.set_detector(experiment.detector)
    self.set_beam_obj(experiment.beam)
    self.set_goniometer(experiment.goniometer)
예제 #45
0
파일: Index.py 프로젝트: xia2/xia2
    def run(self, method):
      '''Run dials.index with the given indexing method (e.g. fft3d,
      fft1d, real_space_grid_search), building the command line from
      the configured sweep/spot filenames and optional parameters.

      On completion loads the resulting experiment list and indexed
      reflections, and records the P1 cell, the number of refined
      reflections and the positional RMSDs (x, y, z in pixels/frames)
      of the reflections used in refinement.'''
      from xia2.Handlers.Streams import Debug
      Debug.write('Running dials.index')

      self.clear_command_line()
      for f in self._sweep_filenames:
        self.add_command_line(f)
      for f in self._spot_filenames:
        self.add_command_line(f)
      self.add_command_line('indexing.method=%s' % method)
      nproc = PhilIndex.params.xia2.settings.multiprocessing.nproc
      self.set_cpu_threads(nproc)
      self.add_command_line('indexing.nproc=%i' % nproc)
      # small-molecule data: ice rings carry real signal, keep them
      if PhilIndex.params.xia2.settings.small_molecule:
        self.add_command_line('filter_ice=false')
      if self._reflections_per_degree is not None:
        self.add_command_line(
          'reflections_per_degree=%i' %self._reflections_per_degree)
      if self._fft3d_n_points is not None:
        self.add_command_line(
          'fft3d.reciprocal_space_grid.n_points=%i' %self._fft3d_n_points)
      if self._close_to_spindle_cutoff is not None:
        self.add_command_line(
          'close_to_spindle_cutoff=%f' %self._close_to_spindle_cutoff)
      if self._outlier_algorithm:
        self.add_command_line('outlier.algorithm=%s' % self._outlier_algorithm)
      if self._max_cell:
        self.add_command_line('max_cell=%d' % self._max_cell)
      if self._min_cell:
        self.add_command_line('min_cell=%d' % self._min_cell)
      if self._histogram_binning is not None:
        self.add_command_line('max_cell_estimation.histogram_binning=%s' %self._histogram_binning)
      if self._d_min_start:
        self.add_command_line('d_min_start=%f' % self._d_min_start)
      # a user-assigned lattice constrains the space group for indexing
      if self._indxr_input_lattice is not None:
        from xia2.Experts.SymmetryExpert import lattice_to_spacegroup_number
        self._symm = lattice_to_spacegroup_number(
            self._indxr_input_lattice)
        self.add_command_line('known_symmetry.space_group=%s' % self._symm)
      if self._indxr_input_cell is not None:
        self.add_command_line(
          'known_symmetry.unit_cell="%s,%s,%s,%s,%s,%s"' %self._indxr_input_cell)
      if self._maximum_spot_error:
        self.add_command_line('maximum_spot_error=%.f' %
                              self._maximum_spot_error)
      if self._detector_fix:
        self.add_command_line('detector.fix=%s' % self._detector_fix)
      if self._beam_fix:
        self.add_command_line('beam.fix=%s' % self._beam_fix)
      if self._phil_file is not None:
        self.add_command_line("%s" %self._phil_file)

      # unique output filenames, keyed on the xia2 process id
      self._experiment_filename = os.path.join(
        self.get_working_directory(), '%d_experiments.json' %self.get_xpid())
      self._indexed_filename = os.path.join(
        self.get_working_directory(), '%d_indexed.pickle' %self.get_xpid())
      self.add_command_line("output.experiments=%s" %self._experiment_filename)
      self.add_command_line("output.reflections=%s" %self._indexed_filename)

      self.start()
      self.close_wait()
      self.check_for_errors()

      # load the results back in and record the P1 cell
      from dials.array_family import flex
      from dxtbx.serialize import load
      self._experiment_list = load.experiment_list(self._experiment_filename)
      self._reflections = flex.reflection_table.from_pickle(
        self._indexed_filename)

      crystal = self._experiment_list.crystals()[0]
      self._p1_cell = crystal.get_unit_cell().parameters()

      # compute positional RMSDs over the reflections actually used in
      # refinement (calculated vs observed pixel/frame centroids)
      refined_sel = self._reflections.get_flags(self._reflections.flags.used_in_refinement)
      refl = self._reflections.select(refined_sel)
      xc, yc, zc = refl['xyzcal.px'].parts()
      xo, yo, zo = refl['xyzobs.px.value'].parts()
      import math
      self._nref = refl.size()
      self._rmsd_x = math.sqrt(flex.mean(flex.pow2(xc - xo)))
      self._rmsd_y = math.sqrt(flex.mean(flex.pow2(yc - yo)))
      self._rmsd_z = math.sqrt(flex.mean(flex.pow2(zc - zo)))
예제 #46
0
def run_once(directory):
  # Gather indexing statistics for a single sweep directory: strong spot
  # counts, estimated resolution percentiles, and per-lattice indexing
  # results (cell, counts, d_min, RMSDs).
  # NOTE(review): Python 2 code (print statements, old except syntax);
  # the function body appears truncated here — no return is visible.
  from dxtbx.serialize import load
  sweep_dir = os.path.basename(directory)
  print sweep_dir

  # xia2 may write the datablock under either of two names
  datablock_name = os.path.join(directory, "datablock.json")
  if not os.path.exists(datablock_name):
    # this is what xia2 calls it:
    datablock_name = os.path.join(directory, "datablock_import.json")
  strong_spots_name = os.path.join(directory, "strong.pickle")
  experiments_name = os.path.join(directory, "experiments.json")
  indexed_spots_name = os.path.join(directory, "indexed.pickle")
  unindexed_spots_name = os.path.join(directory, "unindexed.pickle")
  # nothing to analyse without a datablock and strong spots
  if not (os.path.exists(datablock_name) and os.path.exists(strong_spots_name)):
    return

  datablock = load.datablock(datablock_name)
  assert len(datablock) == 1
  if len(datablock[0].extract_sweeps()) == 0:
    print "Skipping %s" %directory
    return
  sweep = datablock[0].extract_sweeps()[0]
  template = sweep.get_template()

  strong_spots = easy_pickle.load(strong_spots_name)
  n_strong_spots = len(strong_spots)
  # experiments.json only exists if indexing succeeded
  if os.path.exists(experiments_name):
    experiments = load.experiment_list(experiments_name)
    n_indexed_lattices = len(experiments)
  else:
    experiments = None
    n_indexed_lattices = 0

  # count successfully integrated lattices from the XDS output files
  g = glob.glob(os.path.join(directory, "xds*", "run_2", "INTEGRATE.HKL"))
  n_integrated_lattices = len(g)

  # indexed reflections may be in one file or split per lattice
  if os.path.exists(indexed_spots_name):
    indexed_spots = easy_pickle.load(indexed_spots_name)
  else:
    indexed_spots = None
    g = glob.glob(os.path.join(directory, "indexed_*.pickle"))
    if len(g):
      for path in g:
        if indexed_spots is None:
          indexed_spots = easy_pickle.load(path)
        else:
          indexed_spots.extend(easy_pickle.load(path))

  if os.path.exists(unindexed_spots_name):
    unindexed_spots = easy_pickle.load(unindexed_spots_name)
    n_unindexed_spots = len(unindexed_spots)
  else:
    n_unindexed_spots = 0

  # calculate estimated d_min for sweep based on 95th percentile
  from dials.algorithms.indexing import indexer
  detector = sweep.get_detector()
  scan = sweep.get_scan()
  beam = sweep.get_beam()
  goniometer = sweep.get_goniometer()
  if len(strong_spots) == 0:
    # no spots: report zeros rather than failing on empty selections
    d_strong_spots_99th_percentile = 0
    d_strong_spots_95th_percentile = 0
    d_strong_spots_50th_percentile = 0
    n_strong_spots_dmin_4 = 0
  else:
    # map centroids to reciprocal space so d-spacings can be computed
    spots_mm = indexer.indexer_base.map_spots_pixel_to_mm_rad(
      strong_spots, detector, scan)
    indexer.indexer_base.map_centroids_to_reciprocal_space(
      spots_mm, detector, beam, goniometer)
    d_spacings = 1/spots_mm['rlp'].norms()
    # sort high-to-low resolution, then pick percentile entries by index
    perm = flex.sort_permutation(d_spacings, reverse=True)
    d_spacings_sorted = d_spacings.select(perm)
    percentile_99th = int(math.floor(0.99 * len(d_spacings)))
    percentile_95th = int(math.floor(0.95 * len(d_spacings)))
    percentile_50th = int(math.floor(0.5 * len(d_spacings)))
    d_strong_spots_99th_percentile = d_spacings_sorted[percentile_99th]
    d_strong_spots_95th_percentile = d_spacings_sorted[percentile_95th]
    d_strong_spots_50th_percentile = d_spacings_sorted[percentile_50th]
    n_strong_spots_dmin_4 = (d_spacings >= 4).count(True)

  # per-lattice accumulators (one entry appended per indexed experiment)
  cell_params = flex.sym_mat3_double()
  n_indexed = flex.double()
  d_min_indexed = flex.double()
  rmsds = flex.vec3_double()
  sweep_dir_cryst = flex.std_string()
  if experiments is not None:
    for i, experiment in enumerate(experiments):
      sweep_dir_cryst.append(sweep_dir)
      crystal_model = experiment.crystal
      unit_cell = crystal_model.get_unit_cell()
      space_group = crystal_model.get_space_group()
      crystal_symmetry = crystal.symmetry(unit_cell=unit_cell,
                                          space_group=space_group)
      # report cells in the reference setting so they are comparable
      cb_op_reference_setting =  crystal_symmetry.change_of_basis_op_to_reference_setting()
      crystal_symmetry_reference_setting = crystal_symmetry.change_basis(
        cb_op_reference_setting)
      cell_params.append(crystal_symmetry_reference_setting.unit_cell().parameters())
      # reflections belonging to this lattice only
      spots_mm = indexed_spots.select(indexed_spots['id'] == i)
      n_indexed.append(len(spots_mm))
      if len(spots_mm) == 0:
        d_min_indexed.append(0)
      else:
        indexer.indexer_base.map_centroids_to_reciprocal_space(
          spots_mm, detector, beam, goniometer)
        d_spacings = 1/spots_mm['rlp'].norms()
        perm = flex.sort_permutation(d_spacings, reverse=True)
        d_min_indexed.append(d_spacings[perm[-1]])
      # RMSD calculation can fail (e.g. missing columns); record a
      # sentinel (-1,-1,-1) rather than aborting the whole sweep
      try:
        rmsds.append(get_rmsds_obs_pred(spots_mm, experiment))
      except Exception, e:
        print e
        rmsds.append((-1,-1,-1))
        continue
예제 #47
0
  def __init__(self,
               pickle_path,
               sweep_path,
               extra_args,
               expected_unit_cell,
               expected_rmsds,
               expected_hall_symbol,
               n_expected_lattices=1,
               relative_length_tolerance=0.005,
               absolute_angle_tolerance=0.5):
    """Run dials.index on the given spot pickle and sweep in a temporary
    directory, then assert on the results.

    Checks: the expected number of lattices was found; each crystal's
    unit cell is similar to expected_unit_cell within the given
    tolerances; the space group Hall symbol matches; per-experiment
    RMSDs do not exceed expected_rmsds.
    NOTE(review): Python 2 code (print statements).
    """

    args = ["dials.index", pickle_path, sweep_path] + extra_args

    # run dials.index inside a fresh temporary directory, restoring the
    # original working directory afterwards
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory(suffix="test_dials_index")
    os.chdir(tmp_dir)
    command = " ".join(args)
    print command
    result = easy_run.fully_buffered(command=command).raise_if_errors()
    os.chdir(cwd)
    assert os.path.exists(os.path.join(tmp_dir, "experiments.json"))
    experiments_list = dxtbx_load.experiment_list(
      os.path.join(tmp_dir, "experiments.json"), check_format=False)
    assert len(experiments_list.crystals()) == n_expected_lattices, (
      len(experiments_list.crystals()), n_expected_lattices)
    assert os.path.exists(os.path.join(tmp_dir, "indexed.pickle"))
    # timers retained for optional reporting in the disabled block below
    from libtbx.utils import time_log
    unpickling_timer = time_log("unpickling")
    self.calc_rmsds_timer = time_log("calc_rmsds")
    unpickling_timer.start()
    self.indexed_reflections = load.reflections(os.path.join(tmp_dir, "indexed.pickle"))
    unpickling_timer.stop()
    # validate every indexed lattice against the expectations
    for i in range(len(experiments_list)):
      experiment = experiments_list[i]
      self.crystal_model = experiment.crystal
      #assert self.crystal_model.get_unit_cell().is_similar_to(
        #expected_unit_cell,
        #relative_length_tolerance=relative_length_tolerance,
        #absolute_angle_tolerance=absolute_angle_tolerance), (
          #self.crystal_model.get_unit_cell().parameters(),
          #expected_unit_cell.parameters())
      assert unit_cells_are_similar(
        self.crystal_model.get_unit_cell(),expected_unit_cell,
        relative_length_tolerance=relative_length_tolerance,
        absolute_angle_tolerance=absolute_angle_tolerance), (
          self.crystal_model.get_unit_cell().parameters(),
          expected_unit_cell.parameters())
      sg = self.crystal_model.get_space_group()
      assert sg.type().hall_symbol() == expected_hall_symbol, (
        sg.type().hall_symbol(), expected_hall_symbol)
      # only reflections assigned to this experiment, with non-zero
      # Miller indices, contribute to the RMSD check
      reflections = self.indexed_reflections.select(
        self.indexed_reflections['id'] == i)
      mi = reflections['miller_index']
      assert (mi != (0,0,0)).count(False) == 0
      reflections = reflections.select(mi != (0,0,0))
      self.rmsds = self.get_rmsds_obs_pred(reflections, experiment)
      for actual, expected in zip(self.rmsds, expected_rmsds):
        assert actual <= expected, "%s %s" %(self.rmsds, expected_rmsds)
    if 0:
      # disabled timing report
      print self.calc_rmsds_timer.legend
      print unpickling_timer.report()
      print self.calc_rmsds_timer.report()
예제 #48
0
파일: DialsIndexer.py 프로젝트: xia2/xia2
                                  rbs.get_log_file())
      rbs.run()

      from cctbx import crystal, sgtbx

      for k in sorted(rbs.get_bravais_summary()):
        summary = rbs.get_bravais_summary()[k]

        # FIXME need to do this better - for the moment only accept lattices
        # where R.M.S. deviation is less than twice P1 R.M.S. deviation.

        if self._indxr_input_lattice is None:
          if not summary['recommended']:
            continue

        experiments = load.experiment_list(
          summary['experiments_file'], check_format=False)
        cryst = experiments.crystals()[0]
        cs = crystal.symmetry(unit_cell=cryst.get_unit_cell(),
                              space_group=cryst.get_space_group())
        cb_op_best_to_ref = cs.change_of_basis_op_to_reference_setting()
        cs_reference = cs.change_basis(cb_op_best_to_ref)
        lattice = str(bravais_types.bravais_lattice(
          group=cs_reference.space_group()))
        cb_op = cb_op_best_to_ref * sgtbx.change_of_basis_op(str(summary['cb_op']))

        self._solutions[k] = {
          'number':k,
          'mosaic':0.0,
          'metric':summary['max_angular_difference'],
          'rmsd':summary['rmsd'],
          'nspots':summary['nspots'],