Beispiel #1
0
def test_experimentlist_with_identifiers():
    """Check identifier handling on an ExperimentList: uniqueness enforcement,
    dict round-trips, renaming, and select/remove by identifier."""
    experiments = ExperimentList()

    for name in ("bacon", "sausage"):
        experiments.append(
            Experiment(beam=Beam(s0=(0, 0, -1)),
                       detector=Detector(),
                       identifier=name))

    # Appending a duplicate identifier must be rejected
    with pytest.raises(Exception):
        experiments.append(
            Experiment(beam=Beam(), detector=Detector(), identifier="bacon"))

    # Identifiers must survive a to_dict / decode round-trip
    decoded = ExperimentListDict(experiments.to_dict()).decode()
    for idx in (0, 1):
        assert experiments[idx].identifier == decoded[idx].identifier

    assert tuple(experiments.identifiers()) == ("bacon", "sausage")
    experiments[0].identifier = "spam"
    assert tuple(experiments.identifiers()) == ("spam", "sausage")

    # Selection keeps only the named identifiers, removal drops them
    experiments.append(Experiment(identifier="bacon"))
    experiments.select_on_experiment_identifiers(["spam", "bacon"])
    assert list(experiments.identifiers()) == ["spam", "bacon"]
    experiments.append(Experiment(identifier="ham"))
    experiments.append(Experiment(identifier="jam"))
    experiments.remove_on_experiment_identifiers(["spam", "jam"])
    assert list(experiments.identifiers()) == ["bacon", "ham"]
Beispiel #2
0
    def refine(self, exps, ref, refining_predictions=False, best=None):
        """Run hopper (diffBragg) refinement on the single experiment, then
        delegate to the parent class's refine().

        Args:
            exps: ExperimentList — must contain exactly one experiment.
            ref: reflection table associated with the experiment.
            refining_predictions: unused here; kept for interface
                compatibility with the parent class — TODO confirm.
            best: optional starting model passed through to
                hopper_utils.refine.

        Returns:
            Whatever the parent class's refine() returns for the (possibly
            hopper-refined) experiments and reflections.
        """
        exps_out = exps
        if not self.params.skip_hopper:
            if self.params.dispatch.refine:
                print("WARNING: hopper_process will always run its own refinement, ignoring dials.refine phil scope")
            # Disable dials.refine dispatch: hopper performs refinement itself.
            self.params.dispatch.refine = False
            assert len(exps) == 1
            if self.params.reidx_obs:
                # Optionally re-index the observed spots before refinement.
                exps, ref = self._reindex_obs(exps, self.observed)

            # Refine the single experiment; also returns the modeler and the
            # parameter vector x used below for the pandas summary.
            exp, ref, self.stage1_modeler, x = hopper_utils.refine(exps[0], ref,
                                               self.params.diffBragg,
                                               spec=self.params.refspec,
                                               gpu_device=self.device_id, return_modeler=True, best=best)
            orig_exp_name = os.path.abspath(self.params.output.refined_experiments_filename)
            refls_name = os.path.abspath(self.params.output.indexed_filename)
            self.params.diffBragg.outdir = self.params.output.output_dir
            # TODO: what about composite mode ?
            self.stage1_df = save_to_pandas(x, self.stage1_modeler.SIM, orig_exp_name, self.params.diffBragg,
                                            self.stage1_modeler.E, 0, refls_name, None)
            # Replace the input experiments with the refined one
            exps_out = ExperimentList()
            exps_out.append(exp)

            # Persist modeler state named after the reflection file stem
            basename = os.path.splitext(os.path.basename(refls_name))[0]
            self._save_modeler_info(basename)

        return super(Hopper_Processor, self).refine(exps_out, ref)
def experiments():
    """Make a list of three empty experiments"""
    expts = ExperimentList()
    for _ in range(3):
        expts.append(Experiment())
    return expts
Beispiel #4
0
def index(
    experiments: ExperimentList,
    observed: flex.reflection_table,
    params: phil.scope_extract,
) -> Tuple[ExperimentList, flex.reflection_table, dict]:
    """Preprocess and index the experiments, then unify shared models.

    Returns a tuple of (indexed experiments, indexed reflections,
    results-summary dict).
    """
    reflection_tables, params, method_list = preprocess(
        experiments, observed, params)

    # Run the indexing and collect a summary of the results
    indexed_experiments, indexed_reflections, results_summary = index_all_concurrent(
        experiments,
        reflection_tables,
        params,
        method_list,
    )

    # If more than one beam or detector model survived indexing, map every
    # experiment onto the models of the first experiment.
    several_models = (
        len(indexed_experiments.detectors()) > 1
        or len(indexed_experiments.beams()) > 1
    )
    if several_models:
        combine = CombineWithReference(
            detector=indexed_experiments[0].detector,
            beam=indexed_experiments[0].beam)
        combined = ExperimentList()
        for expt in indexed_experiments:
            combined.append(combine(expt))
        indexed_experiments = combined

    return indexed_experiments, indexed_reflections, results_summary
Beispiel #5
0
def test_change_of_basis_ops_to_minimum_cell_mpro():
    """Change-of-basis ops to the minimum cell should make these two related
    P1 cells similar to their median cell."""
    input_ucs = [
        (46.023, 55.001, 64.452, 64.744, 78.659, 89.824),
        (44.747, 53.916, 62.554, 114.985, 99.610, 90.736),
    ]

    # Build one P1 experiment per input unit cell
    expts = ExperimentList()
    p1_group = sgtbx.space_group_info("P1").group()
    for uc_params in input_ucs:
        cell = uctbx.unit_cell(uc_params)
        B = scitbx.matrix.sqr(cell.fractionalization_matrix()).transpose()
        expts.append(
            Experiment(
                crystal=Crystal(B, space_group=p1_group, reciprocal=True)))

    # Actually run the method we are testing
    cb_ops = change_of_basis_ops_to_minimum_cell(
        expts,
        max_delta=5,
        relative_length_tolerance=0.05,
        absolute_angle_tolerance=2)
    expts.change_basis(cb_ops, in_place=True)
    assert symmetry.unit_cells_are_similar_to(
        expts,
        median_unit_cell(expts),
        relative_length_tolerance=0.05,
        absolute_angle_tolerance=2,
    )
Beispiel #6
0
def index(experiments, observed, params):
    """Index still-shot experiments.

    Splits the observed reflections per experiment, maps their centroids to
    reciprocal space, optionally auto-determines the maximum cell, runs the
    configured indexing methods concurrently, and finally combines beam and
    detector models onto a single reference model.

    Args:
        experiments: ExperimentList of still experiments to index.
        observed: reflection table of observed spots with experiment ids.
        params: phil parameters controlling indexing and refinement.

    Returns:
        Tuple of (indexed ExperimentList, indexed reflection table,
        results-summary dict).
    """
    # Stills: scan-varying refinement makes no sense; force the stills indexer.
    params.refinement.parameterisation.scan_varying = False
    params.indexing.stills.indexer = "stills"

    reflections = observed.split_by_experiment_id()
    # Calculate necessary quantities
    for refl, experiment in zip(reflections, experiments):
        elist = ExperimentList([experiment])
        refl["imageset_id"] = flex.int(refl.size(),
                                       0)  # needed for centroid_px_to_mm
        refl.centroid_px_to_mm(elist)
        refl.map_centroids_to_reciprocal_space(elist)

    if (params.indexing.max_cell is
            Auto) and (not params.indexing.known_symmetry.unit_cell):
        max_cells = []
        for refl in reflections:
            try:
                max_cells.append(find_max_cell(refl).max_cell)
            except (DialsIndexError, AssertionError):
                # Per-image max-cell estimation may fail; skip those images.
                pass
        # Compute the maximum once, for both the log message and the phil
        # parameter (the original evaluated max(max_cells) twice).
        max_cell = max(max_cells)
        logger.info(f"Setting max cell to {max_cell:.1f} " + "\u212B")
        params.indexing.max_cell = max_cell

    method_list = params.method
    if "real_space_grid_search" in method_list:
        if not params.indexing.known_symmetry.unit_cell:
            # real_space_grid_search requires a known unit cell
            logger.info(
                "No unit cell given, real_space_grid_search will not be used")
            method_list.remove("real_space_grid_search")
    methods = ", ".join(method_list)
    pl = "s" if (len(method_list) > 1) else ""
    logger.info(f"Attempting indexing with {methods} method{pl}")

    indexed_experiments, indexed_reflections, results_summary = index_all_concurrent(
        experiments,
        reflections,
        params,
        method_list,
    )

    summary_table = make_summary_table(results_summary)
    # Fixed typo in the log message ("sucessfully" -> "successfully").
    logger.info("\nSummary of images successfully indexed\n" + summary_table)

    n_images = len(set(e.imageset.get_path(0) for e in indexed_experiments))
    logger.info(
        f"{indexed_reflections.size()} spots indexed on {n_images} images\n")

    # combine beam and detector models if not already
    if (len(indexed_experiments.detectors())) > 1 or (len(
            indexed_experiments.beams())) > 1:
        combine = CombineWithReference(
            detector=indexed_experiments[0].detector,
            beam=indexed_experiments[0].beam)
        elist = ExperimentList()
        for expt in indexed_experiments:
            elist.append(combine(expt))
        indexed_experiments = elist

    return indexed_experiments, indexed_reflections, results_summary
Beispiel #7
0
    def predict_to_miller_set_with_shadow(self, expt, resolution):
        """Predict reflections for expt to the given resolution, remove those
        flagged as shadowed, and reduce the rest to a unique Miller set.

        Args:
            expt: an Experiment providing crystal/beam/detector/goniometer.
            resolution: high-resolution (dmin) limit for prediction.

        Returns:
            Tuple of (unique anomalous miller.set of unshadowed predictions,
            boolean selection flagging the shadowed predictions).
        """
        predicted = flex.reflection_table.from_predictions(expt, dmin=resolution)

        # transmogrify this to an ExperimentList from an Experiment
        experiments = ExperimentList()
        experiments.append(expt)
        predicted["id"] = flex.int(predicted.size(), 0)
        shadowed = filter_shadowed_reflections(
            experiments, predicted, experiment_goniometer=True
        )
        # Keep only the predictions that are NOT shadowed
        predicted = predicted.select(~shadowed)

        hkl = predicted["miller_index"]

        # now get a full set of all unique miller indices
        obs = miller.set(
            crystal_symmetry=crystal.symmetry(
                space_group=expt.crystal.get_space_group(),
                unit_cell=expt.crystal.get_unit_cell(),
            ),
            anomalous_flag=True,
            indices=hkl,
        ).unique_under_symmetry()

        return obs, shadowed
Beispiel #8
0
def test_sort_tables_to_experiments_order_single_dataset_files():
    """Test reflection table sorting when tables contain a single dataset."""

    def _two_experiments():
        # Two experiments with identifiers "0" and "1", in that order
        expts = ExperimentList()
        expts.append(Experiment(identifier="0"))
        expts.append(Experiment(identifier="1"))
        return expts

    # Reflection tables supplied in the wrong order
    tables = [
        mock_reflection_file_object(id_=1).data,
        mock_reflection_file_object(id_=0).data,
    ]
    refls = sort_tables_to_experiments_order(tables, _two_experiments())

    # The tables should have been swapped into experiment order
    assert refls[0] is tables[1]
    assert refls[1] is tables[0]
    assert list(refls[0].experiment_identifiers().values()) == ["0"]
    assert list(refls[1].experiment_identifiers().values()) == ["1"]

    # Reflection tables already in the correct order
    tables = [
        mock_reflection_file_object(id_=0).data,
        mock_reflection_file_object(id_=1).data,
    ]
    refls = sort_tables_to_experiments_order(tables, _two_experiments())

    # The order should be left untouched
    assert refls[0] is tables[0]
    assert refls[1] is tables[1]
    assert list(refls[0].experiment_identifiers().values()) == ["0"]
    assert list(refls[1].experiment_identifiers().values()) == ["1"]
    def predict_to_miller_set_with_shadow(self, expt, resolution):
        """Predict reflections for expt to the given resolution, remove those
        flagged as shadowed, and reduce the rest to a unique Miller set.

        Args:
            expt: an Experiment providing crystal/beam/detector/goniometer.
            resolution: high-resolution (dmin) limit for prediction.

        Returns:
            Tuple of (unique anomalous miller.set of unshadowed predictions,
            boolean selection flagging the shadowed predictions).
        """
        from dials.array_family import flex
        from dials.algorithms.shadowing.filter import filter_shadowed_reflections
        predicted = flex.reflection_table.from_predictions(expt,
                                                           dmin=resolution)

        # transmogrify this to an ExperimentList from an Experiment
        from dxtbx.model import ExperimentList
        experiments = ExperimentList()
        experiments.append(expt)
        predicted['id'] = flex.int(predicted.size(), 0)
        shadowed = filter_shadowed_reflections(experiments,
                                               predicted,
                                               experiment_goniometer=True)
        # Keep only the predictions that are NOT shadowed
        predicted = predicted.select(~shadowed)

        hkl = predicted['miller_index']

        # now get a full set of all unique miller indices
        from cctbx import miller
        from cctbx import crystal

        obs = miller.set(crystal_symmetry=crystal.symmetry(
            space_group=expt.crystal.get_space_group(),
            unit_cell=expt.crystal.get_unit_cell()),
                         anomalous_flag=True,
                         indices=hkl).unique_under_symmetry()

        return obs, shadowed
def save_opt_det(phil_params, x, ref_params, SIM):
    """Write the optimized detector model to the configured output file."""
    detector = get_optimized_detector(x, ref_params, SIM)
    experiment = Experiment()
    experiment.detector = detector
    elist = ExperimentList()
    elist.append(experiment)
    out_name = phil_params.geometry.optimized_detector_name
    elist.as_file(out_name)
    print("Saved detector model to %s" % out_name)
Beispiel #11
0
def test_select_remove_on_experiment_identifiers():
    """Exercise select/remove-by-identifier on a reflection table, keeping a
    parallel ExperimentList in sync and checking consistency after each step."""

    table = flex.reflection_table()
    table["id"] = flex.int([0, 1, 2, 3])

    # Four experiments with identifiers mirrored into the table's map
    experiments = ExperimentList()
    experiments.append(Experiment(identifier="abcd"))
    experiments.append(Experiment(identifier="efgh"))
    experiments.append(Experiment(identifier="ijkl"))
    experiments.append(Experiment(identifier="mnop"))
    table.experiment_identifiers()[0] = "abcd"
    table.experiment_identifiers()[1] = "efgh"
    table.experiment_identifiers()[2] = "ijkl"
    table.experiment_identifiers()[3] = "mnop"

    table.assert_experiment_identifiers_are_consistent(experiments)

    # Removing an identifier drops its rows and its map entry
    table = table.remove_on_experiment_identifiers(["efgh"])
    del experiments[1]
    table.assert_experiment_identifiers_are_consistent(experiments)

    assert list(table.experiment_identifiers().keys()) == [0, 2, 3]
    assert list(
        table.experiment_identifiers().values()) == ["abcd", "ijkl", "mnop"]

    # Selecting keeps only the named identifiers
    table = table.select_on_experiment_identifiers(["abcd", "mnop"])
    del experiments[1]  # now ijkl
    table.assert_experiment_identifiers_are_consistent(experiments)
    assert list(table.experiment_identifiers().keys()) == [0, 3]
    assert list(table.experiment_identifiers().values()) == ["abcd", "mnop"]

    # reset 'id' column such that they are numbered 0 .. n-1
    table.reset_ids()
    table.assert_experiment_identifiers_are_consistent(experiments)
    assert list(table.experiment_identifiers().keys()) == [0, 1]
    assert list(table.experiment_identifiers().values()) == ["abcd", "mnop"]
    # test that the function doesn't fail if no identifiers set
    table1 = copy.deepcopy(table)
    for k in table1.experiment_identifiers().keys():
        del table1.experiment_identifiers()[k]
    table1.reset_ids()
    assert list(table1.experiment_identifiers().keys()) == []

    # Test exception is raised if bad choice
    with pytest.raises(KeyError):
        table.remove_on_experiment_identifiers(["efgh"])
    with pytest.raises(KeyError):
        table.select_on_experiment_identifiers(["efgh"])

    table = flex.reflection_table()
    table["id"] = flex.int([0, 1, 2, 3])
    # Test exception is raised if identifiers map not set
    with pytest.raises(KeyError):
        table.remove_on_experiment_identifiers(["efgh"])
    with pytest.raises(KeyError):
        table.select_on_experiment_identifiers(["abcd", "mnop"])
Beispiel #12
0
def test_experimentlist_imagesequence_decode(mocker):
    """Check that decoding an ExperimentListDict builds a shared ImageSequence
    only once when several experiments reference the same imageset entry."""
    # These models are shared between experiments
    beam = Beam(s0=(0, 0, -1))
    detector = Detector()
    gonio = Goniometer()

    # Construct the experiment list: three one-image scans sharing the models
    experiments = ExperimentList()
    for i in range(3):
        experiments.append(
            Experiment(
                beam=beam,
                detector=detector,
                scan=ScanFactory.make_scan(
                    image_range=(i + 1, i + 1),
                    exposure_times=[1],
                    oscillation=(0, 0),
                    epochs=[0],
                ),
                goniometer=gonio,
            ))

    # Convert experiment list to dict and manually insert a shared imageset
    d = experiments.to_dict()
    d["imageset"].append({
        "__id__": "ImageSequence",
        "template": "Puck3_10_1_####.cbf.gz"
    })
    # Point every experiment at the single shared imageset entry
    for e in d["experiment"]:
        e["imageset"] = 0

    # Monkeypatch this function as we don't actually have an imageset
    make_sequence = mocker.patch.object(ExperimentListDict, "_make_sequence")
    # Ensure that if make_sequence is called more than once it returns a different
    # value each time
    make_sequence.side_effect = lambda *args, **kwargs: mocker.MagicMock()

    # Decode the dict to get a new experiment list
    experiments2 = ExperimentListDict(d).decode()

    # This function should only be called once per imageset
    make_sequence.assert_called_once()

    # Verify that this experiment is as we expect: shared models deduplicated,
    # per-experiment scans preserved, one shared imageset instance
    assert len(experiments2) == 3
    assert len(experiments2.imagesets()) == 1
    assert len(experiments2.goniometers()) == 1
    assert len(experiments2.detectors()) == 1
    assert len(experiments2.beams()) == 1
    assert len(experiments2.scans()) == 3
    for expt in experiments2:
        assert expt.imageset is experiments2.imagesets()[0]
Beispiel #13
0
def test_exp(idval=0):
    """Test experiments object."""
    crystal_dict = {
        "__id__": "crystal",
        "real_space_a": [1.0, 0.0, 0.0],
        "real_space_b": [0.0, 1.0, 0.0],
        "real_space_c": [0.0, 0.0, 2.0],
        "space_group_hall_symbol": " C 2y",
    }
    expts = ExperimentList()
    expts.append(Experiment(crystal=Crystal.from_dict(crystal_dict)))
    expts[0].identifier = str(idval)
    return expts
Beispiel #14
0
def generate_experiments_reflections(
    space_group,
    lattice_group=None,
    unit_cell=None,
    unit_cell_volume=1000,
    seed=0,
    d_min=1,
    sigma=0.1,
    sample_size=100,
    map_to_p1=False,
    twin_fractions=None,
    map_to_minimum=True,
):
    """Generate synthetic datasets and wrap them as experiments + reflections.

    All keyword arguments are passed straight through to generate_test_data.
    Each generated dataset becomes one Experiment (crystal plus a 180-image
    scan) and one reflection table carrying its Miller indices, d-spacings
    and summation intensities.

    Returns:
        Tuple of (ExperimentList, list of reflection tables, reindexing
        operators as returned by generate_test_data).
    """
    datasets, reindexing_ops = generate_test_data(
        space_group,
        lattice_group=lattice_group,
        unit_cell=unit_cell,
        unit_cell_volume=unit_cell_volume,
        seed=seed,
        d_min=d_min,
        sigma=sigma,
        sample_size=sample_size,
        map_to_p1=map_to_p1,
        twin_fractions=twin_fractions,
        map_to_minimum=map_to_minimum,
    )

    expts = ExperimentList()
    refl_tables = []

    for i, dataset in enumerate(datasets):
        # B matrix from the dataset's unit cell (reciprocal-space setting)
        B = scitbx.matrix.sqr(
            dataset.unit_cell().fractionalization_matrix()).transpose()
        expts.append(
            Experiment(
                crystal=Crystal(B,
                                space_group=dataset.space_group(),
                                reciprocal=True),
                scan=Scan(image_range=(0, 180), oscillation=(0.0, 1.0)),
            ))
        # One reflection table per dataset, id column set to the dataset index
        refl = flex.reflection_table()
        refl["miller_index"] = dataset.indices()
        refl["id"] = flex.int(refl.size(), i)
        refl["d"] = dataset.d_spacings().data()
        refl["intensity.sum.value"] = dataset.data()
        refl["intensity.sum.variance"] = flex.pow2(dataset.sigmas())
        # Mark every reflection as summation-integrated
        refl.set_flags(flex.bool(len(refl), True), refl.flags.integrated_sum)
        refl_tables.append(refl)
    return expts, refl_tables, reindexing_ops
Beispiel #15
0
def make_MAD_merged_mtz_file(params, experiments, reflections, wavelengths):
    """Make a multi wavelength merged mtz file from experiments and reflections.

    Args:
        params: phil parameters (output dataset/crystal names, project name).
        experiments: ExperimentList covering all wavelengths.
        reflections: list whose first element is the combined reflection table.
        wavelengths: mapping of wavelength -> iterable of experiment indices.

    Returns:
        The assembled mtz file object from the MAD merged-MTZ writer.
    """
    # need to add a crystal to the mtz object
    # now go through data selecting on wavelength - loop over each to get mtz_object
    # Create the mtz file

    mtz_writer = MADMergedMTZWriter(experiments[0].crystal.get_space_group(),
                                    experiments[0].crystal.get_unit_cell())

    # now add each wavelength.
    # Fall back to default naming unless one name was supplied per wavelength.
    if len(params.output.dataset_names) != len(wavelengths.keys()):
        logger.info(
            "Unequal number of dataset names and wavelengths, using default naming."
        )
        dnames = [None] * len(wavelengths.keys())
    else:
        dnames = params.output.dataset_names
    if len(params.output.crystal_names) != len(wavelengths.keys()):
        logger.info(
            "Unequal number of crystal names and wavelengths, using default naming."
        )
        cnames = [None] * len(wavelengths.keys())
    else:
        cnames = params.output.crystal_names

    for dname, cname, (wavelength, exp_nos) in zip(dnames, cnames,
                                                   wavelengths.items()):
        # Select the experiments (and their reflections) for this wavelength
        expids = []
        new_exps = ExperimentList()
        for i in exp_nos:
            expids.append(experiments[i].identifier)  # string
            new_exps.append(experiments[i])
        refls = reflections[0].select_on_experiment_identifiers(expids)

        logger.info("Running merge for wavelength: %s", wavelength)
        merged, anom, amplitudes, anom_amp = merge_and_truncate(
            params, new_exps, [refls])
        #### Add each wavelength as a new crystal.
        mtz_writer.add_crystal(crystal_name=cname,
                               project_name=params.output.project_name)
        mtz_writer.add_dataset(wavelength,
                               merged,
                               anom,
                               amplitudes,
                               anom_amp,
                               name=dname)

    return mtz_writer.mtz_file
Beispiel #16
0
def test_change_of_basis_ops_to_minimum_cell_with_outlier():
    """Check that the outlier cell receives a None change-of-basis op while
    the remaining cells map to mutually similar minimum cells."""
    symmetries = [
        crystal.symmetry(unit_cell=uc, space_group="P1") for uc in (
            (52.8868, 52.8868, 333.522, 90, 90, 120),
            (52.6503, 53.0292, 333.783, 89.9872, 89.2247, 60.8078),
            (52.9571, 53.0005, 334.255, 90.0493, 90.0042, 119.893),
            (
                54.4465,
                56.5677,
                355.775,
                93.4376,
                90.0999,
                118.256,
            ),  # This is an outlier
            (52.9235, 52.9235, 335.296, 90, 90, 120),
            (53.4531, 53.4531, 322.909, 90, 90, 120),
        )
    ]

    # Setup the input experiments and reflection tables
    expts = ExperimentList()
    for cs in symmetries:
        B = scitbx.matrix.sqr(
            cs.unit_cell().fractionalization_matrix()).transpose()
        expts.append(
            Experiment(crystal=Crystal(
                B, space_group=cs.space_group(), reciprocal=True)))

    # Actually run the method we are testing
    cb_ops = change_of_basis_ops_to_minimum_cell(
        expts,
        max_delta=5,
        relative_length_tolerance=0.05,
        absolute_angle_tolerance=2)
    # The outlier (index 3) must be the single experiment given no op
    assert cb_ops.count(None) == 1
    assert cb_ops[3] is None
    # Drop the outlier before applying the remaining change-of-basis ops
    expts = ExperimentList(
        [expt for expt, cb_op in zip(expts, cb_ops) if cb_op])
    cb_ops = [cb_op for cb_op in cb_ops if cb_op]

    expts.change_basis(cb_ops, in_place=True)
    assert symmetry.unit_cells_are_similar_to(
        expts,
        median_unit_cell(expts),
        relative_length_tolerance=0.05,
        absolute_angle_tolerance=2,
    )
Beispiel #17
0
def test_median_cell():
    """median_unit_cell should return the component-wise median cell."""
    cell_params = [
        (10, 11, 11.9, 90, 85, 90),
        (10.1, 11.2, 12, 90, 85.5, 90),
        (10.2, 11.1, 12, 90, 84.7, 90),
    ]
    expts = ExperimentList()
    for p in cell_params:
        cell = uctbx.unit_cell(p)
        group = sgtbx.space_group_info("P1").group()
        B = scitbx.matrix.sqr(cell.fractionalization_matrix()).transpose()
        expts.append(
            Experiment(crystal=Crystal(B, space_group=group, reciprocal=True)))

    median = median_unit_cell(expts)
    assert median.parameters() == pytest.approx((10.1, 11.1, 12, 90, 85, 90))
def test_experiments_multiaxisgonio():
    """Create a mock experiments object."""
    # Two identical two-axis goniometers that differ only in which axis scans.
    experiments = ExperimentList()
    for scan_axis in (1, 0):
        gonio = GoniometerFactory.from_dict(
            {
                "axes": [
                    [1.0 / sqrt(2.0), 0.0, -1.0 / sqrt(2.0)],
                    [1.0, 0.0, 0.0],
                ],
                "angles": [0.0, 0.0],
                "names": ["GON_PHI", "GON_OMEGA"],
                "scan_axis": scan_axis,
            }
        )
        # beam along +z
        experiments.append(
            Experiment(
                beam=Beam(s0=(0.0, 0.0, 2.0)),
                goniometer=gonio,
                scan=Scan(image_range=[1, 90], oscillation=[0.0, 1.0]),
            )
        )

    return experiments
Beispiel #19
0
def generated_exp(n=1):
    """Generate an experiment list with n experiments (default 1).

    Each experiment gets a 25-image scan, the same C2 crystal model, and its
    index as identifier.
    """
    experiments = ExperimentList()
    exp_dict = {
        "__id__": "crystal",
        "real_space_a": [1.0, 0.0, 0.0],
        "real_space_b": [0.0, 1.0, 0.0],
        "real_space_c": [0.0, 0.0, 2.0],
        "space_group_hall_symbol": " C 2y",
    }
    for i in range(n):
        experiments.append(
            Experiment(
                scan=Scan(image_range=[1, 25], oscillation=[0.0, 1.0]),
                crystal=Crystal.from_dict(exp_dict),
                identifier=str(i),
            ))
    return experiments
Beispiel #20
0
def test_interpret_images_to_doses_options():
    """Test handling of command line options for experiments input.

    Covers default doses, shared-crystal accumulation, explicit starting
    doses, per-sweep dose rates, and the mismatched-length error cases.
    """
    # Three sweeps of 10, 20 and 10 images
    experiments = ExperimentList()
    experiments.append(Experiment(scan=Scan(image_range=(1, 10), oscillation=(0, 1.0))))
    experiments.append(Experiment(scan=Scan(image_range=(1, 20), oscillation=(0, 1.0))))
    experiments.append(Experiment(scan=Scan(image_range=(1, 10), oscillation=(0, 1.0))))

    # Default
    starting_doses, dpi = dials.pychef.interpret_images_to_doses_options(
        experiments, dose_per_image=[1]
    )
    assert starting_doses == [0, 0, 0]
    assert dpi == [1.0, 1.0, 1.0]

    # Multi-sweep measurements on same crystal
    starting_doses, dpi = dials.pychef.interpret_images_to_doses_options(
        experiments, dose_per_image=[1], shared_crystal=True
    )
    assert starting_doses == [0, 10, 30]
    assert dpi == [1.0, 1.0, 1.0]

    # Specify starting doses
    starting_doses, dpi = dials.pychef.interpret_images_to_doses_options(
        experiments, dose_per_image=[1], starting_doses=[0, 20, 0], shared_crystal=False
    )
    assert starting_doses == [0, 20, 0]
    assert dpi == [1.0, 1.0, 1.0]

    # Specify doses per image and shared crystal
    starting_doses, dpi = dials.pychef.interpret_images_to_doses_options(
        experiments, dose_per_image=[1.0, 2.0, 1.0], shared_crystal=True
    )
    assert starting_doses == [0, 10, 50]
    assert dpi == [1.0, 2.0, 1.0]

    # Test error is raised if bad input values for starting doses or dose per image.
    with pytest.raises(ValueError):
        _, __ = dials.pychef.interpret_images_to_doses_options(
            experiments, dose_per_image=[1.0], starting_doses=[0, 1]
        )
    with pytest.raises(ValueError):
        _, __ = dials.pychef.interpret_images_to_doses_options(
            experiments, dose_per_image=[1.0, 2.0]
        )
def experiment_list():
  """Build a five-experiment list whose beam/detector/goniometer/scan models
  are shared in a 1-2-3-2-1 pattern, each with a distinct identifier."""
  from dxtbx.model import Beam, Detector, Goniometer, Scan

  # Initialise a list of experiments
  experiments = ExperimentList()

  # Three distinct instances of each model type
  beams = [Beam() for _ in range(3)]
  detectors = [Detector() for _ in range(3)]
  goniometers = [Goniometer() for _ in range(3)]
  scans = [Scan() for _ in range(3)]

  # Model reuse pattern and per-experiment identifiers
  order = [0, 1, 2, 1, 0]
  identifiers = ["sausage", "eggs", "bacon", "toast", "beans"]

  # Populate with various experiments
  for idx, ident in zip(order, identifiers):
    experiments.append(Experiment(
      beam=beams[idx],
      detector=detectors[idx],
      goniometer=goniometers[idx],
      scan=scans[idx],
      identifier=ident))

  # Return the list of experiments
  return experiments
Beispiel #22
0
 def from_stills_and_crystal(imageset, crystal, load_models=True):
     """Create an experiment list from stills and crystal.

     Args:
         imageset: stills imageset; one Experiment is created per image.
         crystal: crystal model shared by every created experiment.
         load_models: if True, copy the per-image beam/detector/goniometer/
             scan models from the imageset onto each experiment; otherwise
             set only the single-image slice and the crystal.

     Returns:
         An ExperimentList with one experiment per image in the imageset.
     """
     experiments = ExperimentList()
     if load_models:
         for i in range(len(imageset)):
             experiments.append(
                 Experiment(
                     imageset=imageset[i:i + 1],
                     beam=imageset.get_beam(i),
                     detector=imageset.get_detector(i),
                     goniometer=imageset.get_goniometer(i),
                     scan=imageset.get_scan(i),
                     crystal=crystal,
                 ))
     else:
         for i in range(len(imageset)):
             experiments.append(
                 Experiment(imageset=imageset[i:i + 1], crystal=crystal))
     return experiments
Beispiel #23
0
def test_sort_tables_to_experiments_order_multi_dataset_files():
    """Test reflection table sorting when a table contains multiple datasets."""
    # Two-dataset table first, single-dataset table second: wrong order.
    tables = [
        mock_two_reflection_file_object(ids=[1, 2]).data,
        mock_reflection_file_object(id_=0).data,
    ]
    experiments = ExperimentList()
    for i in range(3):
        experiments.append(Experiment(identifier=str(i)))

    refls = sort_tables_to_experiments_order(tables, experiments)

    # The single-dataset table must come first, matching experiment order.
    assert refls[0] is tables[1]
    assert refls[1] is tables[0]
    assert list(refls[0].experiment_identifiers().values()) == ["0"]
    assert list(refls[1].experiment_identifiers().values()) == ["1", "2"]
def test_split_chunk_sizes(tmpdir, option, with_identifiers):
    """Run dials.split_experiments on a five-experiment input and check the
    split output files.

    `option` and `with_identifiers` are supplied by pytest parametrization
    (not visible here); `option` is the chunking command-line flag under test.
    """

    ids = list(range(0, 5))
    experiments = ExperimentList()
    reflections = flex.reflection_table()
    reflections["id"] = flex.int(ids)
    reflections["intensity"] = flex.double([(i + 1) * 100.0 for i in ids])

    # One experiment per id; optionally with string identifiers mapped
    for i in ids:
        exp = generate_exp()
        if with_identifiers:
            exp.identifier = str(i)
            reflections.experiment_identifiers()[i] = str(i)
        experiments.append(exp)

    experiments.as_json(tmpdir.join("tmp.expt").strpath)
    reflections.as_file(tmpdir.join("tmp.refl").strpath)

    # Run the command-line tool in a subprocess
    result = procrunner.run(
        [
            "dials.split_experiments",
            tmpdir.join("tmp.expt").strpath,
            tmpdir.join("tmp.refl").strpath,
            option,
        ],
        working_directory=tmpdir,
    )
    assert not result.returncode and not result.stderr

    # Expect chunks of sizes 2, 2, 1 with the corresponding intensities
    for j, n, intensities in zip(
        [0, 1, 2], [2, 2, 1], [[100.0, 200.0], [300.0, 400.0], [500.0]]
    ):
        assert tmpdir.join("split_%s.refl" % j).check()
        assert tmpdir.join("split_%s.expt" % j).check()
        expts = load.experiment_list(
            tmpdir.join("split_%s.expt" % j), check_format=False
        )
        assert len(expts) == n
        refls = flex.reflection_table.from_file(tmpdir.join("split_%s.refl" % j))
        assert list(set(refls["id"])) == list(range(0, n))
        assert list(refls["intensity"]) == intensities
        refls.assert_experiment_identifiers_are_consistent(expts)
    def prepare_dxtbx_models(self, setting_specific_ai, sg, isoform=None):
        """Construct dxtbx beam, detector and crystal models from the legacy
        autoindexing state and wrap them in a single-experiment list.

        Args:
            setting_specific_ai: autoindexing engine providing distance, beam
                centre, orientation and mosaicity for this setting.
            sg: space group symbol for the crystal model.
            isoform: optional isoform whose cell replaces the refined B matrix.

        Returns:
            ExperimentList containing one Experiment with the built models.
        """
        from dxtbx.model import BeamFactory
        beam = BeamFactory.simple(wavelength=self.inputai.wavelength)

        from dxtbx.model import DetectorFactory
        detector = DetectorFactory.simple(
            sensor=DetectorFactory.sensor("PAD"),
            distance=setting_specific_ai.distance(),
            beam_centre=[
                setting_specific_ai.xbeam(),
                setting_specific_ai.ybeam()
            ],
            fast_direction="+x",
            slow_direction="+y",
            pixel_size=[self.pixel_size, self.pixel_size],
            image_size=[self.inputpd['size1'], self.inputpd['size1']],
        )

        # Real-space crystal basis from the refined orientation matrix
        direct = matrix.sqr(
            setting_specific_ai.getOrientation().direct_matrix())
        from dxtbx.model import Crystal
        crystal = Crystal(
            real_space_a=matrix.row(direct[0:3]),
            real_space_b=matrix.row(direct[3:6]),
            real_space_c=matrix.row(direct[6:9]),
            space_group_symbol=sg,
        )
        crystal.set_mosaicity(setting_specific_ai.getMosaicity())
        if isoform is not None:
            # Override the refined cell with the isoform's reference cell
            newB = matrix.sqr(isoform.fractionalization_matrix()).transpose()
            crystal.set_B(newB)

        from dxtbx.model import Experiment, ExperimentList
        experiments = ExperimentList()
        experiments.append(
            Experiment(beam=beam, detector=detector, crystal=crystal))

        # Fixed: the original used Python 2 `print x` statements, which are
        # a SyntaxError under Python 3.
        print(beam)
        print(detector)
        print(crystal)
        return experiments
def _export_unmerged_mtz(params, experiments, reflection_table):
    """Export data to unmerged_mtz format (as single file or split by wavelength)."""
    from dials.command_line.export import MTZExporter
    from dials.command_line.export import phil_scope as export_phil_scope

    # Start from the export tool's defaults and overlay the scaling choices.
    export_params = export_phil_scope.extract()
    export_params.intensity = ["scale"]
    export_params.mtz.partiality_threshold = params.cut_data.partiality_cutoff
    export_params.mtz.crystal_name = params.output.crystal_name
    if params.cut_data.d_min:
        export_params.mtz.d_min = params.cut_data.d_min

    if len(params.output.unmerged_mtz) > 1:
        # One output file per distinct wavelength group.
        from dxtbx.model import ExperimentList

        wavelengths = match_wavelengths(experiments)
        assert len(params.output.unmerged_mtz) == len(wavelengths.keys())
        for fname, wavelength in zip(params.output.unmerged_mtz,
                                     wavelengths.keys()):
            export_params.mtz.hklout = fname
            logger.info("\nSaving output to an unmerged mtz file to %s.",
                        fname)
            selected = ExperimentList()
            identifiers = []
            for idx, expt in enumerate(experiments):
                if idx in wavelengths[wavelength]:
                    selected.append(expt)
                    identifiers.append(expt.identifier)
            MTZExporter(
                export_params,
                selected,
                [reflection_table.select_on_experiment_identifiers(identifiers)],
            ).export()
    else:
        # Single file covering every experiment.
        logger.info(
            "\nSaving output to an unmerged mtz file to %s.",
            params.output.unmerged_mtz[0],
        )
        export_params.mtz.hklout = params.output.unmerged_mtz[0]
        MTZExporter(export_params, experiments, [reflection_table]).export()
# Beispiel #27
# 0
def test_UnitCellAnalysisObserver():
    """Exercise UnitCellAnalysisObserver.update() and make_plots()."""
    # Ten random P1 cells with volumes near 1000 A^3.
    sgi = sgtbx.space_group_info("P1")
    unit_cells = []
    for _ in range(10):
        unit_cells.append(
            sgi.any_compatible_unit_cell(volume=random.uniform(990, 1010)))

    # One experiment per unit cell, all with identity orientation.
    experiments = ExperimentList()
    U = matrix.identity(3)
    for cell in unit_cells:
        B = matrix.sqr(cell.fractionalization_matrix()).transpose()
        A_inv = (U * B).inverse()
        experiments.append(
            Experiment(
                crystal=Crystal(
                    A_inv[:3],
                    A_inv[3:6],
                    A_inv[6:9],
                    space_group=sgi.group(),
                )
            )
        )

    # Cluster the crystal symmetries to obtain a dendrogram.
    symmetries = [expt.crystal.get_crystal_symmetry() for expt in experiments]
    ucs = UnitCellCluster.from_crystal_symmetries(
        symmetries, lattice_ids=experiments.identifiers())
    _, dendrogram, _ = ucs.ab_cluster(write_file_lists=False, doplot=False)

    # Minimal stand-in for the scaling script object the observer reads.
    script = mock.Mock()
    script._experiments = experiments
    script.unit_cell_dendrogram = dendrogram

    # The observer should capture both inputs and produce the plot dict.
    observer = observers.UnitCellAnalysisObserver()
    observer.update(script)
    assert set(observer.data) == {"experiments", "dendrogram"}
    assert "unit_cell_graphs" in observer.make_plots()
# Beispiel #28
# 0
    def from_sequence_and_crystal(imageset, crystal, load_models=True):
        """Create an experiment list from sequence and crystal.

        With load_models=False a single bare Experiment (imageset +
        crystal only) is returned; otherwise beam/detector/goniometer/scan
        models are copied from the imageset.
        """
        assert isinstance(imageset, ImageSequence)

        if not load_models:
            return ExperimentList([Experiment(imageset=imageset, crystal=crystal)])

        experiments = ExperimentList()
        scan = imageset.get_scan()
        if scan.is_still():
            # Still shots: one experiment per frame, each with that
            # frame's one-image scan, all referencing the same imageset.
            first, last = scan.get_array_range()
            for frame in range(first, last):
                single = imageset[frame : frame + 1]
                experiments.append(
                    Experiment(
                        imageset=imageset,
                        beam=imageset.get_beam(),
                        detector=imageset.get_detector(),
                        goniometer=imageset.get_goniometer(),
                        scan=single.get_scan(),
                        crystal=crystal,
                    )
                )
        else:
            # Rotation data: one experiment covering the whole sequence.
            experiments.append(
                Experiment(
                    imageset=imageset,
                    beam=imageset.get_beam(),
                    detector=imageset.get_detector(),
                    goniometer=imageset.get_goniometer(),
                    scan=scan,
                    crystal=crystal,
                )
            )

        return experiments
def test_experimentlist_with_identifiers():
    """Check experiment identifiers: uniqueness enforcement, dict
    round-tripping and in-place reassignment."""
    # Fix: Goniometer and Scan were imported but never used.
    from dxtbx.model import Beam, Detector

    # Initialise a list of experiments
    experiments = ExperimentList()

    experiments.append(
        Experiment(beam=Beam(s0=(0, 0, -1)),
                   detector=Detector(),
                   identifier="bacon"))

    experiments.append(
        Experiment(beam=Beam(s0=(0, 0, -1)),
                   detector=Detector(),
                   identifier="sausage"))

    # Appending a duplicate identifier must be rejected.
    with pytest.raises(Exception):
        experiments.append(
            Experiment(beam=Beam(), detector=Detector(), identifier="bacon"))

    # Identifiers survive a round trip through the dict representation.
    d = experiments.to_dict()
    e2 = ExperimentListDict(d).decode()

    assert experiments[0].identifier == e2[0].identifier
    assert experiments[1].identifier == e2[1].identifier

    assert tuple(experiments.identifiers()) == ("bacon", "sausage")
    # Reassigning an identifier is reflected by identifiers().
    experiments[0].identifier = "spam"
    assert tuple(experiments.identifiers()) == ("spam", "sausage")
def _export_multi_merged_mtz(params, experiments, reflection_table):
    """Write one merged mtz file per wavelength group of experiments."""
    from dxtbx.model import ExperimentList

    wavelengths = match_wavelengths(experiments)
    assert len(params.output.merged_mtz) == len(wavelengths.keys())
    for fname, wavelength in zip(params.output.merged_mtz,
                                 wavelengths.keys()):
        # Select the experiments (and their reflections) at this wavelength.
        subset = ExperimentList()
        identifiers = []
        for idx, expt in enumerate(experiments):
            if idx in wavelengths[wavelength]:
                subset.append(expt)
                identifiers.append(expt.identifier)
        refls = reflection_table.select_on_experiment_identifiers(identifiers)

        # Merge equivalents for both the native and anomalous arrays.
        scaled_array = scaled_data_as_miller_array([refls], subset)
        use_iv = params.output.use_internal_variance
        merged = scaled_array.merge_equivalents(
            use_internal_variance=use_iv).array()
        merged_anom = scaled_array.as_anomalous_array().merge_equivalents(
            use_internal_variance=use_iv).array()

        mtz_file = make_merged_mtz_file(merged, merged_anom)
        logger.info("\nSaving output to a merged mtz file to %s.\n", fname)
        mtz_file.write(fname)