Example #1
0
def test_experimentlist_from_file(monkeypatch, dials_regression, tmpdir):
    """Check ExperimentList.from_file with and without format checking."""
    json_path = os.path.join(
        dials_regression, "experiment_test_data", "experiment_1.json"
    )

    # Without DIALS_REGRESSION set, the default check_format=True cannot
    # locate the image data on disk, so loading must raise an IOError.
    with monkeypatch.context() as m:
        m.delenv("DIALS_REGRESSION", raising=False)
        with pytest.raises(IOError) as excinfo:
            ExperimentList.from_file(json_path)
    assert excinfo.value.errno == errno.ENOENT
    assert "No such file or directory" in str(excinfo.value)
    assert "centroid_0001.cbf" in str(excinfo.value)

    # Setting check_format=False should allow the file to load
    exp_list = ExperimentList.from_file(json_path, check_format=False)
    assert len(exp_list) == 1
    assert exp_list[0].beam

    # With DIALS_REGRESSION set, environment-variable expansion in the
    # regression file lets it load with check_format=True as well.
    with monkeypatch.context() as m:
        m.setenv("DIALS_REGRESSION", dials_regression)
        exp_list = ExperimentList.from_file(json_path)
    assert len(exp_list) == 1
    assert exp_list[0].beam
Example #2
0
def test_experimentlist_from_file(dials_regression, tmpdir):
    """Load an experiment list from JSON and round-trip it through pickle.

    Args:
        dials_regression: Path to the dials_regression test data directory.
        tmpdir: pytest-provided temporary directory (py.path.local).
    """
    # This allows expansion of environment variables in regression files
    os.environ["DIALS_REGRESSION"] = dials_regression

    exp_list = ExperimentList.from_file(
        os.path.join(dials_regression, "experiment_test_data",
                     "experiment_1.json"))
    assert len(exp_list) == 1
    assert exp_list[0].beam
    # Try loading from a pickle
    exp_list.as_pickle(tmpdir / "el.pickle")
    exp_list_pk = ExperimentList.from_file(tmpdir / "el.pickle")
    assert len(exp_list_pk) == 1
    # Bug fix: assert on the list loaded from the pickle (previously this
    # re-checked the original exp_list, leaving the round trip untested).
    assert exp_list_pk[0].beam
def test_correct_correction(dials_data):
    """Test that the anvil absorption correction is producing expected values."""
    data_dir = dials_data("centroid_test_data")

    # We'll need an integrated reflection table and an experiment list.
    reflections_file = data_dir.join("integrated.pickle")
    experiments_file = data_dir.join("experiments.json")

    # We need only test with the first ten reflections.
    reflections = flex.reflection_table.from_file(reflections_file)
    reflections = reflections.select(flex.size_t_range(10))

    experiment = ExperimentList.from_file(experiments_file)[0]

    # Test the correction that would be applied to a DAC with 1.5mm-thick anvils,
    # aligned along the z-axis at goniometer zero-datum.
    # Keep an unmodified copy so the applied correction factor can be recovered
    # below as the ratio of corrected to uncorrected intensities.
    old_reflections = copy.deepcopy(reflections)
    correct_intensities_for_dac_attenuation(experiment, reflections, (0, 0, 1),
                                            1.5)

    # Intensity columns to check, each mapped to the integration flag marking
    # which reflections should have been corrected in that column.
    cases = {
        "intensity.sum.value": reflections.flags.integrated_sum,
        "intensity.sum.variance": reflections.flags.integrated_sum,
        "intensity.prf.value": reflections.flags.integrated_prf,
        "intensity.prf.variance": reflections.flags.integrated_prf,
    }
    # Expected intensity correction factors for the ten test reflections;
    # zeros correspond to reflections that were not integrated.
    corrections = flex.double([
        0,
        6.653068275094517,
        6.522657529202368,
        6.3865190053761,
        6.587270967838122,
        6.43403642876391,
        6.39216742203502,
        0,
        6.152148372872684,
        6.0474840161407375,
    ])
    for case, flag in cases.items():
        flagged = reflections.get_flags(flag)

        target_correction = corrections.select(flagged)
        # Variances scale with the square of the intensity correction.
        if "variance" in case:
            target_correction = flex.pow2(target_correction)

        # The correction actually applied, as a corrected/uncorrected ratio.
        intensity_correction = (reflections[case] /
                                old_reflections[case]).select(flagged)

        # Check that the un-integrated reflections are unchanged.
        assert pytest.approx(reflections[case].select(
            ~flagged)) == old_reflections[case].select(~flagged), (
                "Un-integrated reflections have been erroneously "
                "'corrected'.")

        # Check that the applied corrections are correct.
        assert pytest.approx(
            intensity_correction, rel=1e-5
        ) == list(target_correction), (
            "The applied intensity correction to %s doesn't seem to be correct."
            % case)
Example #4
0
def test_mtz_primitive_cell(dials_data, tmpdir):
    """Reindex scaled data to the primitive setting, export to MTZ, and check
    the exported symmetry and resolution range.

    Args:
        dials_data: DIALS test-data fixture.
        tmpdir: pytest-provided temporary directory (py.path.local).
    """
    scaled_expt = dials_data("insulin_processed") / "scaled.expt"
    scaled_refl = dials_data("insulin_processed") / "scaled.refl"

    # First reindex to the primitive setting
    expts = ExperimentList.from_file(scaled_expt.strpath, check_format=False)
    cs = expts[0].crystal.get_crystal_symmetry()
    cb_op = cs.change_of_basis_op_to_primitive_setting()
    result = procrunner.run(
        [
            "dials.reindex",
            scaled_expt.strpath,
            scaled_refl.strpath,
            'change_of_basis_op="%s"' % cb_op,
        ],
        working_directory=tmpdir.strpath,
    )
    # Robustness: fail immediately if the subprocess did not succeed, rather
    # than producing a confusing downstream failure on missing output files.
    assert not result.returncode and not result.stderr

    # Now export the reindexed experiments/reflections
    result = procrunner.run(
        ["dials.export", "reindexed.expt", "reindexed.refl"],
        working_directory=tmpdir.strpath,
    )
    assert not result.returncode and not result.stderr

    mtz_obj = mtz.object(os.path.join(tmpdir.strpath, "scaled.mtz"))
    cs_primitive = cs.change_basis(cb_op)
    assert mtz_obj.space_group() == cs_primitive.space_group()
    refl = flex.reflection_table.from_file(scaled_refl.strpath)
    refl = refl.select(~refl.get_flags(refl.flags.bad_for_scaling, all=False))
    for ma in mtz_obj.as_miller_arrays():
        assert ma.crystal_symmetry().is_similar_symmetry(cs_primitive)
        # d-spacings are invariant under reindexing, so the input reflection
        # d values are a valid reference for the exported resolution range.
        assert ma.d_max_min() == pytest.approx(
            (flex.max(refl["d"]), flex.min(refl["d"]))
        )
Example #5
0
def test_experimentlist_change_basis(dials_data):
    """Check ExperimentList.change_basis with a single change-of-basis op,
    a per-experiment list of ops, in-place operation, and a wrong-length list.
    """
    experiments = ExperimentList()
    for i in range(4):
        experiments.extend(
            ExperimentList.from_file(
                dials_data("vmxi_proteinase_k_sweeps") /
                ("experiments_%i.expt" % i),
                check_format=False,
            ))
    reindexed_uc = (68.368, 103.968, 68.368, 90.000, 90.000, 90.000)
    reindexed_sg = sgtbx.space_group_info("P 4 2 2 (b,c,a)").group()
    cb_op = sgtbx.change_of_basis_op("-a,-c,-b")
    # Bug fix: use a distinct loop variable; previously the loop rebound
    # cb_op to its list form, so the statements after the loop silently
    # operated on a list rather than the single change-of-basis op.
    for op in (cb_op, [cb_op] * len(experiments)):
        expts_rdx = experiments.change_basis(op)
        for expt in expts_rdx:
            assert expt.crystal.get_unit_cell().parameters() == pytest.approx(
                reindexed_uc, abs=0.1)
            assert expt.crystal.get_space_group() == reindexed_sg

    experiments.change_basis(cb_op, in_place=True)
    for expt in experiments:
        assert expt.crystal.get_unit_cell().parameters() == pytest.approx(
            reindexed_uc, abs=0.1)
        assert expt.crystal.get_space_group() == reindexed_sg

    # A list of the wrong length (2 ops for 4 experiments) must be rejected.
    with pytest.raises(AssertionError):
        experiments.change_basis([cb_op, cb_op])
Example #6
0
def test_all_expt_ids_have_expts(dials_data, tmpdir):
    """Index a grid scan and check every reflection id maps to an experiment."""
    data = dials_data("vmxi_thaumatin_grid_index")
    command = [
        "dials.index",
        data.join("split_07602.expt"),
        data.join("split_07602.refl"),
        "stills.indexer=sequences",
        "indexing.method=real_space_grid_search",
        "space_group=P4",
        "unit_cell=58,58,150,90,90,90",
        "max_lattices=8",
        "beam.fix=all",
        "detector.fix=all",
    ]
    result = procrunner.run(command, working_directory=tmpdir)
    assert not result.returncode and not result.stderr
    assert tmpdir.join("indexed.expt").check(file=1)
    assert tmpdir.join("indexed.refl").check(file=1)

    reflections = flex.reflection_table.from_file(tmpdir / "indexed.refl")
    experiments = ExperimentList.from_file(tmpdir / "indexed.expt",
                                           check_format=False)

    # The highest reflection id must correspond to the last experiment.
    assert flex.max(reflections["id"]) + 1 == len(experiments)
Example #7
0
def _run_integration(scope, experiments_file, reflections_file):
    # type: (scope, str, str) -> Tuple[ExperimentList, flex.reflection_table]
    """Run integration programatically, compatible with multiple DIALS versions.

    Args:
        scope: The dials.integrate phil scope
        experiments_file: Path to the experiment list file
        reflections_file: Path to the reflection table file

    Returns:
        A tuple of the integrated experiments and the integrated
        reflection table.

    Raises:
        RuntimeError: If no known dials.integrate programmatic interface
            is available in the installed DIALS version.
    """

    if hasattr(dials.command_line.integrate, "run_integration"):
        # DIALS 3.1+ interface
        # run_integration also returns a report object, which is discarded.
        expts, refls, _ = dials.command_line.integrate.run_integration(
            scope.extract(),
            ExperimentList.from_file(experiments_file),
            flex.reflection_table.from_file(reflections_file),
        )
    elif hasattr(dials.command_line.integrate, "Script"):
        # Pre-3.1-style programmatic interface
        expts, refls = dials.command_line.integrate.Script(phil=scope).run(
            [experiments_file, reflections_file])
    else:
        raise RuntimeError(
            "Could not find dials.integrate programmatic interface 'run_integration' or 'Script'"
        )

    return expts, refls
Example #8
0
def load_reference_geometries(geometry_file_list):
    """Collect the unique reference instrument models from experiment files.

    Args:
        geometry_file_list: Iterable of experiment list file paths.

    Returns:
        A list of dicts, each with keys "detector" and "beam", one per
        unique instrument model found across all input files.
    """
    logger.debug("Collecting reference instrument models.")
    ref_geoms = {
        # Note that 'index' is the index of the experiment in the expt list file,
        # as per dials.show, rather than the UID string of the experiment.
        (expt.detector, expt.beam, f, index)
        for f in geometry_file_list for index, expt in enumerate(
            ExperimentList.from_file(f, check_format=False))
    }

    logger.debug("Removing duplicate reference geometries.")
    duplicates = set()
    # Compare all pairs, but skip any pair already involving a known duplicate
    # so exactly one representative of each equivalent group survives.
    for a, b in filter(duplicates.isdisjoint,
                       itertools.combinations(ref_geoms, 2)):
        if compare_geometries(a[0], b[0]):
            logger.debug(f"Experiment {b[3]} of {b[2]} is a duplicate.")
            duplicates.add(b)

    ref_geoms -= duplicates

    n = len(ref_geoms)
    logger.debug(
        f"Found {n} unique reference geometr{'ies' if n != 1 else 'y'}.")
    for geometry in ref_geoms:
        logger.debug(f"Experiment {geometry[3]} of {geometry[2]} is unique.")

    # Drop the bookkeeping (file, index) entries from the returned records.
    return [{
        "detector": geometry[0],
        "beam": geometry[1]
    } for geometry in ref_geoms]
Example #9
0
    def _import(self, files):  # type: (List[str]) -> None
        """
        Try to run a quick call of dials.import.  Failing that, run a slow call.

        Try initially to construct file name templates contiguous groups of files.
        Failing that, pass a full list of the files to the importer (slower).

        Args:
            files:  List of image filenames.
        """
        info("\nImporting data...")
        if len(files) == 1:
            if os.path.isdir(files[0]):
                debug("You specified a directory. Importing all CBF files in "
                      "that directory.")
                # TODO Support HDF5.
                files = [
                    os.path.join(files[0], f) for f in os.listdir(files[0])
                    if f.endswith(".cbf") or f.endswith(".cbf.gz")
                    or f.endswith(".cbf.bz2")
                ]
            elif len(files[0].split(":")) == 3:
                # 'template:start:end' is the xia2-style image-range syntax.
                debug("You specified an image range in the xia2 format.  "
                      "Importing all specified files.")
                template, start, end = files[0].split(":")
                template = screen19.make_template(template)[0]
                start, end = int(start), int(end)
                if not self._quick_import_templates([(template,
                                                      (start, end))]):
                    warning("Could not import specified image range.")
                    sys.exit(1)
                info("Quick import successful.")
                return
            elif files[0].endswith(".expt"):
                debug("You specified an existing experiment list file.  "
                      "No import necessary.")
                try:
                    self.expts = ExperimentList.from_file(files[0])
                except (IOError, PickleError, ValueError):
                    # Unreadable/corrupt file: fall through to a normal import.
                    pass
                else:
                    self.params.dials_import.output.experiments = files[0]
                    if self.expts:
                        return

        if not files:
            warning("No images found matching input.")
            sys.exit(1)

        # Can the files be quick-imported?
        if self._quick_import(files):
            info("Quick import successful.")
            return

        # Fall back to a full (slow) dials.import over the explicit file list.
        self.params.dials_import.input.experiments = files
        self._run_dials_import()
Example #10
0
def test_generate_mask_trusted_range(dials_data, tmpdir):
    """Masks must be identical whether or not overloads are present."""
    # https://github.com/dials/dials/issues/978
    image_files = [
        f.strpath for f in dials_data("x4wide").listdir("*.cbf", sort=True)
    ]

    def _import_and_mask(import_args, expt_name, mask_name):
        # Import the images with the given options, then generate a
        # rectangle mask from the resulting experiment list.
        importer = ImageImporter(import_phil_scope)
        importer.import_image(import_args + image_files)
        experiments = ExperimentList.from_file(tmpdir.join(expt_name).strpath)
        params = phil_scope.fetch(
            phil.parse("untrusted.rectangle=100,200,100,200")).extract()
        params.output.mask = mask_name
        generate_mask(experiments, params)

    with tmpdir.as_cwd():
        # Import as usual
        _import_and_mask(["output.experiments=no-overloads.expt"],
                         "no-overloads.expt", "pixels1.mask")
        # Import with narrow trusted range to produce overloads
        _import_and_mask(
            ["trusted_range=-1,100", "output.experiments=overloads.expt"],
            "overloads.expt", "pixels2.mask")

    with tmpdir.join("pixels1.mask").open("rb") as fh:
        mask1 = pickle.load(fh)
    with tmpdir.join("pixels2.mask").open("rb") as fh:
        mask2 = pickle.load(fh)

    # Overloads should not be included in the mask
    assert (mask1[0] == mask2[0]).all_eq(True)
Example #11
0
def load_expt_from_df(df, opt=False):
    """Load the first experiment referenced by a hopper-formatted dataframe.

    :param df: a hopper-formatted pandas dataframe with a single row
    :param opt: if True, read the file path from the ``opt_exp_name`` column,
        otherwise from the ``exp_name`` column
    :return: the first experiment of the referenced experiment list file
    :raises FileNotFoundError: if the referenced file does not exist
    """
    if opt:
        data_expt_name = df.opt_exp_name.values[0]
    else:
        data_expt_name = df.exp_name.values[0]
    # An explicit exception survives `python -O`, unlike the previous assert,
    # and names the missing file in the error message.
    if not os.path.exists(data_expt_name):
        raise FileNotFoundError(data_expt_name)
    return ExperimentList.from_file(data_expt_name)[0]
Example #12
0
def test_generate_mask_with_untrusted_rectangle(experiments_masks, tmpdir):
    """An untrusted rectangle should yield masks linked from the experiments."""
    experiments, masks = experiments_masks

    params = phil_scope.fetch(
        phil.parse("untrusted.rectangle=100,200,100,200")).extract()
    params.output.experiments = "masked.expt"
    with tmpdir.as_cwd():
        generate_mask(experiments, params)

    # Every mask file and the updated experiment list must have been written.
    for mask in masks:
        assert tmpdir.join(mask).check()
    assert tmpdir.join("masked.expt").check()

    # The first imageset of the reloaded experiments should reference the
    # first mask file.
    reloaded = ExperimentList.from_file(tmpdir.join("masked.expt").strpath)
    first_imageset = reloaded.imagesets()[0]
    expected_path = tmpdir.join(masks[0]).strpath
    assert first_imageset.external_lookup.mask.filename == expected_path
Example #13
0
def test_generate_mask_function_with_untrusted_rectangle(experiments_masks, tmpdir):
    """Call generate_mask directly with an untrusted rectangle parameter."""
    experiments, masks = experiments_masks
    # Expected output mask filenames, e.g. "pixels.mask" -> "pixels4.mask".
    masks = [tmpdir.join(mask.replace("pixels", "pixels4")) for mask in masks]

    params = phil_scope.fetch().extract()
    params.output.mask = tmpdir.join("pixels4.mask").strpath
    params.output.experiments = tmpdir.join("masked.expt").strpath
    params.untrusted.rectangle = [100, 200, 100, 200]
    generate_mask(experiments, params)

    # All mask files and the updated experiment list must have been written.
    assert all(mask.check() for mask in masks)
    assert tmpdir.join("masked.expt").check()

    # Each imageset in the output should reference its corresponding mask file.
    experiments = ExperimentList.from_file(tmpdir.join("masked.expt").strpath)
    associated_masks = [
        imageset.external_lookup.mask.filename for imageset in experiments.imagesets()
    ]
    assert all(assoc_mask == mask for assoc_mask, mask in zip(associated_masks, masks))
Example #14
0
def test_blank_counts_analysis(dials_data):
    """Check blank_counts_analysis on data with and without blank regions."""
    expts = ExperimentList.from_file(dials_data("insulin_processed") /
                                     "imported.expt",
                                     check_format=False)
    refl = flex.reflection_table.from_file(
        dials_data("insulin_processed") / "strong.refl")
    # A complete data set: no blank regions are expected.
    results = detect_blanks.blank_counts_analysis(refl,
                                                  expts[0].scan,
                                                  phi_step=5,
                                                  fractional_loss=0.1)
    assert set(results.keys()) == {"data", "layout", "blank_regions"}
    # Bin centres of the 5-degree phi bins.
    assert results["data"][0]["x"] == [
        2.5,
        7.5,
        12.5,
        17.5,
        22.5,
        27.5,
        32.5,
        37.5,
        42.5,
    ]
    # Spot counts per phi bin.
    assert results["data"][0]["y"] == [
        2827,
        2589,
        2502,
        2464,
        2515,
        2490,
        2441,
        2500,
        2505,
    ]
    assert not any(results["data"][0]["blank"])
    assert results["blank_regions"] == []

    # Remove reflections with frame (z) positions between 10 and 20 to
    # simulate a blank region, then re-run the analysis.
    z = refl["xyzobs.px.value"].parts()[2]
    refl_subset = refl.select((z < 10) | (z > 20))
    results = detect_blanks.blank_counts_analysis(refl_subset,
                                                  expts[0].scan,
                                                  phi_step=2,
                                                  fractional_loss=0.1)
    assert results["data"][0]["blank"].count(True) == 5
    assert results["blank_regions"] == [(10, 21)]
Example #15
0
def test_connected_components_centred_cell(dials_data):
    """Check missing_reflections.connected_components for a centred cell."""
    experiment = ExperimentList.from_file(
        dials_data("insulin_processed").join("scaled.expt").strpath, check_format=False
    )[0]

    # Predict reflections for a narrow ten-image wedge, then build the set of
    # observed Miller indices at d >= 3 Angstrom.
    experiment.scan.set_image_range((1, 10))
    predict = ScanStaticReflectionPredictor(experiment, dmin=3, margin=1)
    refl = predict.for_ub(experiment.crystal.get_A())
    miller_set = miller.set(
        experiment.crystal.get_crystal_symmetry(),
        refl["miller_index"],
        anomalous_flag=False,
    )
    miller_array = miller_set.d_spacings().resolution_filter(d_min=3)
    complete_set, unique_ms = missing_reflections.connected_components(miller_array)
    # Expected sizes of the connected regions of missing reflections.
    assert [ms.size() for ms in unique_ms] == [581, 32, 29, 6, 3, 3, 3, 2]
    # Verify that all the indices reported missing are actually missing from the input
    for ms in unique_ms:
        assert ms.common_set(miller_array.map_to_asu()).size() == 0
    assert complete_set.completeness() == 1
Example #16
0
def test_load_reference_geometries(dials_data):
    """
    Test `xia2.Schema.load_reference_geometries`.

    Test the function that finds the set of unique instrument models from a list
    of experiment list files.

    There are eight input instrument models, of which only two are unique.
    """
    files = ["scaled_20_25.expt", "scaled_30.expt", "scaled_35.expt"]
    files = [(dials_data("l_cysteine_4_sweeps_scaled") / f).strpath for f in files]
    # A file from a different data collection — presumably supplying the
    # second unique instrument model (see docstring); confirm against data.
    files.append((dials_data("l_cysteine_dials_output") / "indexed.expt").strpath)

    num_input = sum(len(ExperimentList.from_file(f, check_format=False)) for f in files)
    assert num_input == 8, "Expected to find eight experiments, one for each sweep."

    unique_geometries = load_reference_geometries(files)
    assert len(unique_geometries) == 2, "Expected to find two unique instrument models."

    # The two surviving detectors must be distinguishable from each other.
    detectors = (geom["detector"] for geom in unique_geometries)
    assert not compare_geometries(*detectors), "Unique detectors cannot be equivalent."
Example #17
0
def test_few_reflections(dials_data, run_in_tmpdir):
    u"""
    Check that dials.symmetry copes gracefully with very few reflections.

    The input is example integrated data from a ten-image 1° sweep, which
    contains only a few dozen integrated reflections.

    Args:
        dials_data: DIALS custom Pytest fixture for access to test data.
        run_in_tmpdir: DIALS custom Pytest fixture to run this test in a temporary
                       directory.
    """
    data_dir = dials_data("l_cysteine_dials_output")

    # Default dials.symmetry parameters.
    params = symmetry.phil_scope.fetch(source=parse("")).extract()

    # Integrated data from the first ten images of the first sweep.
    experiments = ExperimentList.from_file(data_dir / "11_integrated.expt")
    reflections = [flex.reflection_table.from_file(data_dir / "11_integrated.refl")]

    # This should complete without raising, despite the sparse data.
    symmetry.symmetry(experiments, reflections, params)
Example #18
0
    def _create_profile_model(self):  # type: () -> bool
        """
        Run `dials.create_profile_model` on indexed reflections.

        The indexed experiment list will be overwritten with a copy that includes
        the profile model but is otherwise identical.

        Returns:
            Boolean value indicating whether it was possible to determine a profile
            model from the data.
        """
        info("\nCreating profile model...")
        command = [
            "dials.create_profile_model",
            self.params.dials_index.output.experiments,
            self.params.dials_index.output.reflections,
            # Overwrite the indexed experiment list with the profile-model copy.
            "output = %s" % self.params.dials_index.output.experiments,
        ]
        result = procrunner.run(command,
                                print_stdout=False,
                                debug=procrunner_debug)
        debug("result = %s", screen19.prettyprint_dictionary(result))
        # Reset the mosaicity estimate; only set again on success below.
        self._sigma_m = None
        if result["exitcode"] == 0:
            # Read back the oscillation width and mosaicity from the updated
            # experiment list.
            db = ExperimentList.from_file(
                self.params.dials_index.output.experiments)[0]
            self._oscillation = db.imageset.get_scan().get_oscillation()[1]
            self._sigma_m = db.profile.sigma_m()
            info(
                u"%d images, %s° oscillation, σ_m=%.3f°",
                db.imageset.get_scan().get_num_images(),
                str(self._oscillation),
                self._sigma_m,
            )
            info("Successfully completed (%.1f sec)", result["runtime"])
            return True
        warning("Failed with exit code %d", result["exitcode"])
        return False
Example #19
0
def test_connected_components(dials_data):
    """Check connected components of missing reflections for several wedges."""
    experiment = ExperimentList.from_file(
        dials_data("centroid_test_data").join("experiments.json").strpath
    )[0]

    # Wider image ranges leave fewer (eventually no) missing-data regions.
    image_ranges = [(1, 9), (1, 100), (1, 1000)]
    expected_ms_sizes = [[755], [242, 14, 10, 5, 2, 2, 2], []]
    for image_range, expected_sizes in zip(image_ranges, expected_ms_sizes):
        experiment.scan.set_image_range(image_range)
        predict = ScanStaticReflectionPredictor(experiment, dmin=3, margin=1)
        refl = predict.for_ub(experiment.crystal.get_A())
        miller_set = miller.set(
            experiment.crystal.get_crystal_symmetry(),
            refl["miller_index"],
            anomalous_flag=False,
        )
        miller_array = miller_set.d_spacings().resolution_filter(d_min=3)
        complete_set, unique_ms = missing_reflections.connected_components(miller_array)
        assert len(unique_ms) == len(expected_sizes)
        assert [ms.size() for ms in unique_ms] == expected_sizes
        # Verify that all the indices reported missing are actually missing from the input
        for ms in unique_ms:
            assert ms.common_set(miller_array.map_to_asu()).size() == 0
        assert complete_set.completeness() == 1
Example #20
0
def test_forward_no_model(dials_data):
    """Exercise the forward/reverse profile transforms without a profile model.

    Checks that the forward transform conserves counts under a mask, that a
    padded bounding box captures all intensity on the reverse transform, and
    that a forward-then-reverse round trip reproduces the original image.
    """
    expt = ExperimentList.from_file(
        dials_data("centroid_test_data").join(
            "imported_experiments.json").strpath)[0]

    # Get the models
    beam = expt.beam
    detector = expt.detector
    gonio = expt.goniometer
    scan = expt.scan
    scan.set_image_range((0, 1000))

    # Set some parameters
    sigma_divergence = 0.00101229
    mosaicity = 0.157 * math.pi / 180
    n_sigma = 3
    grid_size = 20
    delta_divergence = n_sigma * sigma_divergence

    step_size = delta_divergence / grid_size
    # Pad by half a grid step so border pixels fall inside the grid.
    delta_divergence2 = delta_divergence + step_size * 0.5
    delta_mosaicity = n_sigma * mosaicity

    # Create the bounding box calculator
    calculate_bbox = BBoxCalculator3D(beam, detector, gonio, scan,
                                      delta_divergence2, delta_mosaicity)

    # Initialise the transform
    spec = transform.TransformSpec(beam, detector, gonio, scan,
                                   sigma_divergence, mosaicity, n_sigma + 1,
                                   grid_size)

    # tst_conservation_of_counts(self):

    # Fixed seed for reproducible random pixel positions.
    random.seed(0)

    assert len(detector) == 1

    s0 = beam.get_s0()
    m2 = gonio.get_rotation_axis()
    s0_length = matrix.col(beam.get_s0()).length()

    # Create an s1 map
    s1_map = transform.beam_vector_map(detector[0], beam, True)

    for i in range(100):

        # Get random x, y, z
        x = random.uniform(300, 1800)
        y = random.uniform(300, 1800)
        z = random.uniform(500, 600)

        # Get random s1, phi, panel
        s1 = matrix.col(detector[0].get_pixel_lab_coord(
            (x, y))).normalize() * s0_length
        phi = scan.get_angle_from_array_index(z, deg=False)
        panel = 0

        # Calculate the bounding box
        bbox = calculate_bbox(s1, z, panel)
        x0, x1, y0, y1, z0, z1 = bbox

        # Create the coordinate system
        cs = CoordinateSystem(m2, s0, s1, phi)
        # Skip reflections with zeta near zero, where the transform is
        # ill-conditioned.
        if abs(cs.zeta()) < 0.1:
            continue

        # The grid index generator
        step_size = delta_divergence / grid_size
        grid_index = transform.GridIndexGenerator(cs, x0, y0,
                                                  (step_size, step_size),
                                                  grid_size, s1_map)

        # Create the image
        # image = flex.double(flex.grid(z1 - z0, y1 - y0, x1 - x0), 1)
        image = gaussian((z1 - z0, y1 - y0, x1 - x0), 10.0,
                         (z - z0, y - y0, x - x0), (2.0, 2.0, 2.0))
        # Mask only those pixels whose four corners all map inside the grid,
        # so the counts-conservation check below is meaningful.
        mask = flex.bool(flex.grid(image.all()), False)
        for j in range(y1 - y0):
            for i in range(x1 - x0):
                inside = False
                gx00, gy00 = grid_index(j, i)
                gx01, gy01 = grid_index(j, i + 1)
                gx10, gy10 = grid_index(j + 1, i)
                gx11, gy11 = grid_index(j + 1, i + 1)
                mingx = min([gx00, gx01, gx10, gx11])
                maxgx = max([gx00, gx01, gx10, gx11])
                mingy = min([gy00, gy01, gy10, gy11])
                maxgy = max([gy00, gy01, gy10, gy11])
                if (mingx >= 0 and maxgx < 2 * grid_size + 1 and mingy >= 0
                        and maxgy < 2 * grid_size + 1):
                    inside = True
                for k in range(1, z1 - z0 - 1):
                    mask[k, j, i] = inside

        # Transform the image to the grid
        transformed = transform.TransformForwardNoModel(
            spec, cs, bbox, 0, image.as_double(), mask)
        grid = transformed.profile()

        # Get the sums and ensure they're the same
        eps = 1e-7
        sum_grid = flex.sum(grid)
        sum_image = flex.sum(flex.double(flex.select(image, flags=mask)))
        assert abs(sum_grid - sum_image) <= eps

        # Repeat the forward transform with an all-true mask for the
        # reverse-transform checks below.
        mask = flex.bool(flex.grid(image.all()), True)
        transformed = transform.TransformForwardNoModel(
            spec, cs, bbox, 0, image.as_double(), mask)
        grid = transformed.profile()

        # Boost the bbox to make sure all intensity is included
        x0, x1, y0, y1, z0, z1 = bbox
        bbox2 = (x0 - 10, x1 + 10, y0 - 10, y1 + 10, z0 - 10, z1 + 10)

        # Do the reverse transform
        transformed = transform.TransformReverseNoModel(
            spec, cs, bbox2, 0, grid)
        image2 = transformed.profile()

        # Check the sum of pixels are the same
        sum_grid = flex.sum(grid)
        sum_image = flex.sum(image2)
        assert abs(sum_grid - sum_image) <= eps

        # Do the reverse transform
        transformed = transform.TransformReverseNoModel(
            spec, cs, bbox, 0, grid)
        image2 = transformed.profile()

        from dials.algorithms.statistics import pearson_correlation_coefficient

        # The round-tripped image should closely match the original.
        cc = pearson_correlation_coefficient(image.as_1d().as_double(),
                                             image2.as_1d())
        assert cc >= 0.99
Example #21
0
    def _refine(self):
        """Refine the indexed experiments, one indexer at a time.

        For each unique indexer this optionally extracts the single relevant
        sweep (when multiple sweeps are present but joint refinement is
        disabled), runs scan-static and/or scan-varying refinement as
        configured, records log/report files, and stores the refined
        experiments, reflections and unit cell as refiner payloads.
        """
        for idxr in set(self._refinr_indexers.values()):
            experiments = idxr.get_indexer_experiment_list()

            indexed_experiments = idxr.get_indexer_payload(
                "experiments_filename")
            indexed_reflections = idxr.get_indexer_payload("indexed_filename")

            # If multiple sweeps but not doing joint refinement, get only the
            # relevant reflections.
            multi_sweep = PhilIndex.params.xia2.settings.multi_sweep_refinement
            if len(experiments) > 1 and not multi_sweep:
                xsweeps = idxr._indxr_sweeps
                assert len(xsweeps) == len(experiments)
                # Don't do joint refinement
                assert len(self._refinr_sweeps) == 1
                xsweep = self._refinr_sweeps[0]
                i = xsweeps.index(xsweep)
                experiments = experiments[i:i + 1]

                # Extract and output experiment and reflections for current sweep
                indexed_experiments = os.path.join(
                    self.get_working_directory(),
                    "%s_indexed.expt" % xsweep.get_name())
                indexed_reflections = os.path.join(
                    self.get_working_directory(),
                    "%s_indexed.refl" % xsweep.get_name())

                experiments.as_file(indexed_experiments)

                reflections = flex.reflection_table.from_file(
                    idxr.get_indexer_payload("indexed_filename"))
                sel = reflections["id"] == i
                assert sel.count(True) > 0
                # All selected reflections should come from a single imageset.
                imageset_id = reflections["imageset_id"].select(sel)
                assert imageset_id.all_eq(imageset_id[0])
                sel = reflections["imageset_id"] == imageset_id[0]
                reflections = reflections.select(sel)
                # set indexed reflections to id == 0 and imageset_id == 0
                reflections["id"].set_selected(reflections["id"] == i, 0)
                reflections["imageset_id"] = flex.int(len(reflections), 0)
                reflections.as_file(indexed_reflections)

            # currently only handle one lattice/refiner
            assert len(experiments.crystals()) == 1

            scan_static = PhilIndex.params.dials.refine.scan_static

            # Avoid doing scan-varying refinement on narrow wedges.
            scan_oscillation_ranges = []
            for experiment in experiments:
                start, end = experiment.scan.get_oscillation_range()
                scan_oscillation_ranges.append(end - start)

            min_oscillation_range = min(scan_oscillation_ranges)

            # Scan-varying refinement only for wide enough wedges and when
            # fast mode is not requested.
            if (PhilIndex.params.dials.refine.scan_varying
                    and min_oscillation_range > 5
                    and not PhilIndex.params.dials.fast_mode):
                scan_varying = PhilIndex.params.dials.refine.scan_varying
            else:
                scan_varying = False

            if scan_static:
                refiner = self.Refine()
                refiner.set_experiments_filename(indexed_experiments)
                refiner.set_indexed_filename(indexed_reflections)
                refiner.set_scan_varying(False)
                refiner.run()
                self._refinr_experiments_filename = (
                    refiner.get_refined_experiments_filename())
                self._refinr_indexed_filename = refiner.get_refined_filename()
            else:
                self._refinr_experiments_filename = indexed_experiments
                self._refinr_indexed_filename = indexed_reflections

            if scan_varying:
                # Scan-varying refinement starts from the scan-static result
                # (or the indexed input when scan-static refinement was skipped).
                refiner = self.Refine()
                refiner.set_experiments_filename(
                    self._refinr_experiments_filename)
                refiner.set_indexed_filename(self._refinr_indexed_filename)
                if min_oscillation_range < 36:
                    refiner.set_interval_width_degrees(min_oscillation_range /
                                                       2)
                refiner.run()
                self._refinr_experiments_filename = (
                    refiner.get_refined_experiments_filename())
                self._refinr_indexed_filename = refiner.get_refined_filename()

            if scan_static or scan_varying:
                # Record outputs of whichever refinement ran last.
                FileHandler.record_log_file(
                    "%s REFINE" % idxr.get_indexer_full_name(),
                    refiner.get_log_file())
                report = self.Report()
                report.set_experiments_filename(
                    self._refinr_experiments_filename)
                report.set_reflections_filename(self._refinr_indexed_filename)
                html_filename = os.path.join(
                    self.get_working_directory(),
                    "%i_dials.refine.report.html" % report.get_xpid(),
                )
                report.set_html_filename(html_filename)
                report.run()
                FileHandler.record_html_file(
                    "%s REFINE" % idxr.get_indexer_full_name(), html_filename)

            experiments = ExperimentList.from_file(
                self._refinr_experiments_filename)
            self.set_refiner_payload("models.expt",
                                     self._refinr_experiments_filename)
            self.set_refiner_payload("observations.refl",
                                     self._refinr_indexed_filename)

            # this is the result of the cell refinement
            self._refinr_cell = experiments.crystals()[0].get_unit_cell(
            ).parameters()
Example #22
0
def test_forward(dials_data):
    """Exercise transform.TransformForward on random reflections.

    Two passes over 100 randomly placed reflections:
      1. without a background: the summed counts on the transformed grid
         must equal the summed masked image counts (conservation of counts);
      2. with a random background: both the profile and background grids
         must conserve their respective masked sums.
    """
    expt = ExperimentList.from_file(
        dials_data("centroid_test_data").join(
            "imported_experiments.json").strpath)[0]

    # Get the models
    beam = expt.beam
    detector = expt.detector
    gonio = expt.goniometer
    scan = expt.scan

    # Set some parameters
    sigma_divergence = 0.00101229
    mosaicity = 0.157 * math.pi / 180
    n_sigma = 3
    grid_size = 7
    delta_divergence = n_sigma * sigma_divergence

    # Pad the divergence by half a grid step so bounding boxes fully
    # enclose the transformed region.
    step_size = delta_divergence / grid_size
    delta_divergence2 = delta_divergence + step_size * 0.5
    delta_mosaicity = n_sigma * mosaicity

    # Create the bounding box calculator
    calculate_bbox = BBoxCalculator3D(beam, detector, gonio, scan,
                                      delta_divergence2, delta_mosaicity)

    # Initialise the transform
    spec = transform.TransformSpec(beam, detector, gonio, scan,
                                   sigma_divergence, mosaicity, n_sigma + 1,
                                   grid_size)

    # tst_conservation_of_counts(self):

    # The test assumes a single-panel detector throughout.
    assert len(detector) == 1

    s0 = beam.get_s0()
    m2 = gonio.get_rotation_axis()
    s0_length = matrix.col(beam.get_s0()).length()

    # Create an s1 map
    s1_map = transform.beam_vector_map(detector[0], beam, True)

    for i in range(100):

        # Get random x, y, z
        x = random.uniform(300, 1800)
        y = random.uniform(300, 1800)
        z = random.uniform(0, 9)

        # Get random s1, phi, panel
        s1 = matrix.col(detector[0].get_pixel_lab_coord(
            (x, y))).normalize() * s0_length
        phi = scan.get_angle_from_array_index(z, deg=False)
        panel = 0

        # Calculate the bounding box
        bbox = calculate_bbox(s1, z, panel)
        x0, x1, y0, y1, z0, z1 = bbox

        # Create the coordinate system
        cs = CoordinateSystem(m2, s0, s1, phi)

        # The grid index generator
        step_size = delta_divergence / grid_size
        grid_index = transform.GridIndexGenerator(cs, x0, y0,
                                                  (step_size, step_size),
                                                  grid_size, s1_map)

        # Create the image: a 3D Gaussian blob centred on the reflection.
        # image = flex.double(flex.grid(z1 - z0, y1 - y0, x1 - x0), 1)
        image = gaussian((z1 - z0, y1 - y0, x1 - x0), 10.0,
                         (z - z0, y - y0, x - x0), (2.0, 2.0, 2.0))
        mask = flex.bool(flex.grid(image.all()), False)
        # Mark as "inside" only pixels whose four corners all map into the
        # (2 * grid_size + 1)-wide transformed grid.  NOTE(review): the
        # inner loop variable `i` shadows the outer reflection loop's `i`;
        # harmless here since `i` is not reused after the inner loops.
        for j in range(y1 - y0):
            for i in range(x1 - x0):
                inside = False
                gx00, gy00 = grid_index(j, i)
                gx01, gy01 = grid_index(j, i + 1)
                gx10, gy10 = grid_index(j + 1, i)
                gx11, gy11 = grid_index(j + 1, i + 1)
                mingx = min([gx00, gx01, gx10, gx11])
                maxgx = max([gx00, gx01, gx10, gx11])
                mingy = min([gy00, gy01, gy10, gy11])
                maxgy = max([gy00, gy01, gy10, gy11])
                # NOTE(review): this pass uses a strict `<` bound while the
                # background pass below uses `<=` — confirm which bound is
                # intended; the two passes should probably agree.
                if (mingx >= 0 and maxgx < 2 * grid_size + 1 and mingy >= 0
                        and maxgy < 2 * grid_size + 1):
                    inside = True
                # Exclude the first and last frames of the shoebox.
                for k in range(1, z1 - z0 - 1):
                    mask[k, j, i] = inside

        # Transform the image to the grid
        transformed = transform.TransformForward(spec, cs, bbox, 0,
                                                 image.as_double(), mask)
        grid = transformed.profile()

        # Get the sums and ensure they're the same
        eps = 1e-7
        sum_grid = flex.sum(grid)
        sum_image = flex.sum(flex.double(flex.select(image, flags=mask)))
        assert abs(sum_grid - sum_image) <= eps

    # Test passed

    # tst_transform_with_background(self):

    assert len(detector) == 1
    s0 = beam.get_s0()
    m2 = gonio.get_rotation_axis()
    s0_length = matrix.col(beam.get_s0()).length()

    # Create an s1 map
    s1_map = transform.beam_vector_map(detector[0], beam, True)

    for i in range(100):

        # Get random x, y, z
        x = random.uniform(300, 1800)
        y = random.uniform(300, 1800)
        z = random.uniform(0, 9)

        # Get random s1, phi, panel
        s1 = matrix.col(detector[0].get_pixel_lab_coord(
            (x, y))).normalize() * s0_length
        phi = scan.get_angle_from_array_index(z, deg=False)
        panel = 0

        # Calculate the bounding box
        bbox = calculate_bbox(s1, z, panel)
        x0, x1, y0, y1, z0, z1 = bbox

        # Create the coordinate system
        cs = CoordinateSystem(m2, s0, s1, phi)

        # The grid index generator
        step_size = delta_divergence / grid_size
        grid_index = transform.GridIndexGenerator(cs, x0, y0,
                                                  (step_size, step_size),
                                                  grid_size, s1_map)

        # Create the image plus a uniform-random background of the same shape.
        # image = flex.double(flex.grid(z1 - z0, y1 - y0, x1 - x0), 1)
        image = gaussian((z1 - z0, y1 - y0, x1 - x0), 10.0,
                         (z - z0, y - y0, x - x0), (2.0, 2.0, 2.0))
        background = flex.random_double(len(image))
        background.resize(image.accessor())
        mask = flex.bool(flex.grid(image.all()), False)
        for j in range(y1 - y0):
            for i in range(x1 - x0):
                inside = False
                gx00, gy00 = grid_index(j, i)
                gx01, gy01 = grid_index(j, i + 1)
                gx10, gy10 = grid_index(j + 1, i)
                gx11, gy11 = grid_index(j + 1, i + 1)
                mingx = min([gx00, gx01, gx10, gx11])
                maxgx = max([gx00, gx01, gx10, gx11])
                mingy = min([gy00, gy01, gy10, gy11])
                maxgy = max([gy00, gy01, gy10, gy11])
                # NOTE(review): `<=` here vs `<` in the first pass above —
                # confirm which is intended.
                if (mingx >= 0 and maxgx <= 2 * grid_size + 1 and mingy >= 0
                        and maxgy <= 2 * grid_size + 1):
                    inside = True
                for k in range(1, z1 - z0 - 1):
                    mask[k, j, i] = inside

        # Transform the image to the grid
        transformed = transform.TransformForward(spec, cs, bbox, 0,
                                                 image.as_double(),
                                                 background.as_double(), mask)
        igrid = transformed.profile()
        bgrid = transformed.background()

        # Get the sums and ensure they're the same
        eps = 1e-7
        sum_igrid = flex.sum(igrid)
        sum_bgrid = flex.sum(bgrid)
        sum_image = flex.sum(flex.double(flex.select(image, flags=mask)))
        sum_bkgrd = flex.sum(flex.double(flex.select(background, flags=mask)))
        try:
            assert abs(sum_igrid - sum_image) <= eps
            assert abs(sum_bgrid - sum_bkgrd) <= eps
        except Exception:
            # Report the failing reflection position before re-raising so
            # the random failure case can be reproduced.
            print("Failed for: ", (x, y, z))
            raise
Example #23
0
    locations = ax.xaxis.get_major_locator().tick_values(images.min(), images.max())
    # Replace the tick at the 0th frame (which is meaningless) with one at the 1st.
    locations = np.where(locations == 0, 1, locations)
    ax.xaxis.set_major_locator(ticker.FixedLocator(locations))
    # Format the rotation magnitude (tumbling angle) axis tick labels in degrees.
    ax.yaxis.set_major_formatter(ticker.PercentFormatter(symbol=u"°", decimals=1))

    fig.savefig("tumbling_angle")


# Command-line interface: one positional argument naming the refined
# experiment-list file; the module docstring doubles as the --help text.
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
    "experiment_list",
    type=str,
    metavar="refined.expt",
    help="A refined experiment list from DIALS, such as 'refined.expt'.",
)


if __name__ == "__main__":
    # Parse the command line and load the experiment list named there.
    # `parser`, `plot_angles` and `get_angles` are defined at module level.
    experiment_list = ExperimentList.from_file(parser.parse_args().experiment_list)

    # Only the first experiment model is analysed; warn if more were supplied.
    if len(experiment_list) > 1:
        print(
            "Warning: You have supplied an input file containing multiple "
            "experiment models.  Only the first will be used."
        )

    # NOTE(review): the savefig call above passes "tumbling_angle" with no
    # extension; confirm the saved filename matches the ".png" claimed here.
    print("Saving a plot of tumbling angle versus image number as tumbling_angle.png.")
    plot_angles(*get_angles(experiment_list[0].crystal))
Example #24
0
def experiments_masks(request, dials_data):
    """Load the parametrised experiment file and return it with its masks.

    Reads ``directory``, ``filename`` and ``masks`` from ``request.param``
    and returns ``(ExperimentList, masks)``.
    """
    params = request.param
    expt_path = (dials_data(params["directory"]) /
                 params["filename"]).strpath
    return ExperimentList.from_file(expt_path), params["masks"]
Example #25
0
            "Will not use cuda, but will use openmp, control with env var OMP_NUM_THREADS"
        )
    imgs, expt = forward_models.model_spots_from_pandas(
        df,
        mtz_file=Fname,
        mtz_col=Fcol,
        spectrum_override=spec,
        cuda=cuda,
        device_Id=dev,
        force_no_detector_thickness=no_det_thick,
        use_db=True)

    if args.compareData:
        print("Extracting data from experiment list %s..." %
              df.exp_name.values[0])
        El = ExperimentList.from_file(df.exp_name.values[0])
        iset = El[0].imageset
        # NOTE : assumes a multi-panel detector model, otherwise get_raw_data should have no arg, e.g. iset.get_raw_data()
        data = np.array([a.as_numpy_array() for a in iset.get_raw_data(0)])
        # divide the data by the adu factor
        data /= M.params.refiner.adu_per_photon
        # extract the background:
        print("Extracting background from data ...")
        bg = extract_background_from_data(data, args.j,
                                          (args.filtsz, args.filtsz))
        imgs_w_bg = imgs + bg  # this image is the extracted background and the optimized forward Bragg model

        # this image inclues data except for those pixels that were modeled during stage 1
        imgs2 = data.copy()
        P = M.all_pid
        F = M.all_fast
Example #26
0
import numpy as np
from cxid9114.parameters import ENERGY_CONV

# Simulation parameters (crystal mosaicity, beam and crystal size).
defaultF = 1e3  # default structure-factor amplitude assigned to every reflection
mos_spread = 0
mos_doms = 1
Ncells_abc = 7, 7, 7
profile = "gauss"
beamsize = 0.1
exposure_s = 1
total_flux = 1e12
xtal_size = 0.0005


# NOTE(review): hard-coded absolute path to a local machine — this script
# will only run where this file exists; consider making it an argument.
exper_name = "/Users/dermen/pinks/derek/refined_varying/stills2.expt"
El = ExperimentList.from_file(exper_name)
E = El[0]
# Pull the crystal, detector and beam models from the first experiment.
crystal = E.crystal
DET = E.detector
BEAM = E.beam

symbol = "P212121"
sgi = sgtbx.space_group_info(symbol)
# TODO: allow override of ucell
symm = symmetry(unit_cell=crystal.get_unit_cell(), space_group_info=sgi)
miller_set = symm.build_miller_set(anomalous_flag=True, d_min=1.5, d_max=999)
# NOTE does build_miller_set automatically expand to p1 ? Does it obey systematic absences ?
# Note how to handle sys absences here ?
# Assign the constant amplitude defaultF to every Miller index.
Famp = flex.double(np.ones(len(miller_set.indices())) * defaultF)
mil_ar = miller.array(miller_set=miller_set, data=Famp).set_observation_type_xray_amplitude()
Example #27
0
def test_blank_integrated_analysis(dials_data):
    """Check blank_integrated_analysis on the insulin integrated dataset.

    Three scenarios are exercised:
      1. unmodified data — no blank regions are reported;
      2. profile intensities weakened for the first ten images — the first
         two 5-degree bins are flagged blank;
      3. integrated_prf flags cleared — analysis falls back to the
         unmodified intensity.sum.value, so nothing is blank again.
    """
    expts = ExperimentList.from_file(dials_data("insulin_processed") /
                                     "integrated.expt",
                                     check_format=False)
    refl = flex.reflection_table.from_file(
        dials_data("insulin_processed") / "integrated.refl")

    def analyse():
        # One analysis call with the settings shared by all three scenarios.
        return detect_blanks.blank_integrated_analysis(refl,
                                                       expts[0].scan,
                                                       phi_step=5,
                                                       fractional_loss=0.1)

    expected_y = [
        27.903266149430973,
        25.832527090455052,
        26.9236206883069,
        26.50234804728626,
        26.41019377727383,
        25.810676090828185,
        24.844906790823064,
        25.89992001081651,
        25.580718362291474,
    ]

    # Scenario 1: untouched data, nothing blank.
    results = analyse()
    assert results["data"][0]["x"] == [
        2.5,
        7.5,
        12.5,
        17.5,
        22.5,
        27.5,
        32.5,
        37.5,
        42.5,
    ]
    assert results["data"][0]["y"] == pytest.approx(expected_y)
    assert not any(results["data"][0]["blank"])
    assert results["blank_regions"] == []

    # Scenario 2: weaken reflections on the first ten images so the first
    # two phi bins drop below the fractional_loss threshold.
    z = refl["xyzobs.px.value"].parts()[2]
    refl["intensity.prf.value"].set_selected(
        z < 10, refl["intensity.prf.value"] * 0.05)
    results = analyse()
    weakened_y = [
        1.3951633074715482,
        1.2916263545227527,
    ] + expected_y[2:]
    assert results["data"][0]["y"] == pytest.approx(weakened_y)
    assert results["data"][0]["blank"] == [True, True] + [False] * 7
    assert results["blank_regions"] == [(0, 10)]

    # Scenario 3: unset the integrated_prf flags, so the analysis should
    # instead use the unmodified intensity.sum.value.
    refl.unset_flags(flex.bool(len(refl), True), refl.flags.integrated_prf)
    results = analyse()
    assert not any(results["data"][0]["blank"])
    assert results["blank_regions"] == []
def load_detector_from_expt(expt_file, exp_id=0):
    """Return the detector model at index *exp_id* from an experiment file.

    :param expt_file: path to a DIALS experiment-list file
    :param exp_id: index into the list of detectors (default: the first)
    """
    return ExperimentList.from_file(expt_file).detectors()[exp_id]
Example #29
0
def test_forward_panel_edge(dials_data):
    """Exercise transform.TransformForward on reflections at panel edges.

    For reflections whose bounding boxes overhang the detector panel, only
    pixels inside the panel bounds may contribute to the transformed grid:
    the grid sum must match the masked image sum and be strictly less than
    the full (unmasked) image sum.
    """
    expt = ExperimentList.from_file(
        dials_data("centroid_test_data").join(
            "imported_experiments.json").strpath)[0]

    # Get the models
    beam = expt.beam
    detector = expt.detector
    gonio = expt.goniometer
    scan = expt.scan

    # Set some parameters
    sigma_divergence = 0.00101229
    mosaicity = 0.157 * math.pi / 180
    n_sigma = 3
    grid_size = 7
    delta_divergence = n_sigma * sigma_divergence

    # Pad the divergence by half a grid step so bounding boxes fully
    # enclose the transformed region.
    step_size = delta_divergence / grid_size
    delta_divergence2 = delta_divergence + step_size * 0.5
    delta_mosaicity = n_sigma * mosaicity

    # Create the bounding box calculator
    calculate_bbox = BBoxCalculator3D(beam, detector, gonio, scan,
                                      delta_divergence2, delta_mosaicity)

    # Initialise the transform
    spec = transform.TransformSpec(beam, detector, gonio, scan,
                                   sigma_divergence, mosaicity, n_sigma + 1,
                                   grid_size)

    # The test assumes a single-panel detector.
    assert len(detector) == 1

    s0 = beam.get_s0()
    m2 = gonio.get_rotation_axis()
    s0_length = matrix.col(beam.get_s0()).length()

    # Reflection positions at the corners and edges of the panel, chosen
    # so their bounding boxes extend past the panel bounds.
    image_size = detector[0].get_image_size()
    refl_xy = [
        (0, 0),
        (2, 3),
        (4, 1000),
        (1000, 5),
        (image_size[0] - 1, image_size[1] - 1),
        (image_size[0] - 2, 1),
        (1, image_size[1] - 5),
        (1000, image_size[1] - 4),
        (image_size[0] - 3, 1000),
    ]

    for x, y in refl_xy:
        z = random.uniform(0, 9)

        # Get random s1, phi, panel
        s1 = matrix.col(detector[0].get_pixel_lab_coord(
            (x, y))).normalize() * s0_length
        phi = scan.get_angle_from_array_index(z, deg=False)
        panel = 0

        # Calculate the bounding box
        bbox = calculate_bbox(s1, z, panel)
        x0, x1, y0, y1, z0, z1 = bbox

        # Create the coordinate system
        cs = CoordinateSystem(m2, s0, s1, phi)

        # Create the image: a 3D Gaussian blob centred on the reflection.
        image = gaussian((z1 - z0, y1 - y0, x1 - x0), 10.0,
                         (z - z0, y - y0, x - x0), (2.0, 2.0, 2.0))

        # Mask for the foreground pixels; bg is an all-zero background.
        refl_mask = image > 1e-3
        bg = flex.double(image.accessor())

        # Shoebox mask, i.e. mask out pixels that are outside the panel bounds
        shoebox_mask = flex.bool(image.accessor(), False)
        for j in range(y1 - y0):
            for i in range(x1 - x0):
                if (j + y0 >= 0 and j + y0 < image_size[1] and i + x0 >= 0
                        and i + x0 < image_size[0]):
                    for k in range(z1 - z0):
                        shoebox_mask[k, j, i] = True

        mask = refl_mask & shoebox_mask

        # from matplotlib import pyplot as plt
        # fig, axes = plt.subplots(ncols=refl_mask.focus()[0], nrows=4)
        # for i in range(refl_mask.focus()[0]):
        # axes[0, i].imshow(image.as_numpy_array()[i])
        # axes[1, i].imshow(refl_mask.as_numpy_array()[i])
        # axes[2, i].imshow(shoebox_mask.as_numpy_array()[i])
        # axes[3, i].imshow(mask.as_numpy_array()[i])
        # plt.show()

        # Transform the image to the grid.  Note only refl_mask is passed;
        # the transform itself is expected to exclude off-panel pixels.
        transformed = transform.TransformForward(spec, cs, bbox, 0,
                                                 image.as_double(), bg,
                                                 refl_mask)
        grid = transformed.profile()

        mask = refl_mask & shoebox_mask
        # assert only pixels within the panel were transformed
        assert flex.sum(grid) == pytest.approx(flex.sum(
            image.select(mask.as_1d())),
                                               rel=0.01)
        # The total transformed counts should be less than the (unmasked) image counts
        assert flex.sum(grid) < flex.sum(image)

        # Transform the image to the grid, this time without a background
        transformed = transform.TransformForward(spec, cs, bbox, 0,
                                                 image.as_double(), refl_mask)
        grid = transformed.profile()

        mask = refl_mask & shoebox_mask
        # assert only pixels within the panel were transformed
        assert flex.sum(grid) == pytest.approx(flex.sum(
            image.select(mask.as_1d())),
                                               rel=0.01)
        # The total transformed counts should be less than the (unmasked) image counts
        assert flex.sum(grid) < flex.sum(image)