Example #1
def _prepare_merged_reflection_table(experiments, reflections, d_min=None):
    """Filter the data and prepare a reflection table with merged data."""
    if ("inverse_scale_factor" in reflections[0]
            and "intensity.scale.value" in reflections[0]):
        logger.info("Attempting to perform absence checks on scaled data")
        reflections = filter_reflection_table(reflections[0],
                                              intensity_choice=["scale"],
                                              d_min=d_min)
        reflections["intensity"] = reflections["intensity.scale.value"]
        reflections["variance"] = reflections["intensity.scale.variance"]
    else:
        logger.info(
            "Attempting to perform absence checks on unscaled profile-integrated data"
        )
        reflections = filter_reflection_table(reflections[0],
                                              intensity_choice=["profile"],
                                              d_min=d_min)
        reflections["intensity"] = reflections["intensity.prf.value"]
        reflections["variance"] = reflections["intensity.prf.variance"]

    # now merge
    space_group = experiments[0].crystal.get_space_group()
    reflections["asu_miller_index"] = map_indices_to_asu(
        reflections["miller_index"], space_group)
    reflections["inverse_scale_factor"] = flex.double(reflections.size(), 1.0)
    merged = (_reflection_table_to_iobs(
        reflections, experiments[0].crystal.get_unit_cell(),
        space_group).merge_equivalents(use_internal_variance=False).array())
    merged_reflections = flex.reflection_table()
    merged_reflections["intensity"] = merged.data()
    merged_reflections["variance"] = merged.sigmas()**2
    merged_reflections["miller_index"] = merged.indices()
    return merged_reflections
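
A minimal usage sketch for the helper above, assuming a DIALS installation; the file names "scaled.expt" and "scaled.refl" are hypothetical outputs of dials.scale:

from dxtbx.model import ExperimentList
from dials.array_family import flex

experiments = ExperimentList.from_file("scaled.expt")  # hypothetical file names
tables = [flex.reflection_table.from_file("scaled.refl")]
# the helper takes a list of tables but only uses the first entry
merged = _prepare_merged_reflection_table(experiments, tables, d_min=1.5)
print(merged.size(), "merged reflections")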
Example #2
    def from_reflections_and_experiments(cls, reflection_tables, experiments,
                                         params):
        """Construct the resolutionizer from native dials datatypes."""
        # add some assertions about data

        # do batch assignment (same functions as in dials.export)
        offsets = calculate_batch_offsets(experiments)
        reflection_tables = assign_batches_to_reflections(
            reflection_tables, offsets)
        batches = flex.int()
        intensities = flex.double()
        indices = flex.miller_index()
        variances = flex.double()
        for table in reflection_tables:
            if "intensity.scale.value" in table:
                table = filter_reflection_table(table, ["scale"],
                                                partiality_threshold=0.4)
                intensities.extend(table["intensity.scale.value"])
                variances.extend(table["intensity.scale.variance"])
            else:
                table = filter_reflection_table(table, ["profile"],
                                                partiality_threshold=0.4)
                intensities.extend(table["intensity.prf.value"])
                variances.extend(table["intensity.prf.variance"])
            indices.extend(table["miller_index"])
            batches.extend(table["batch"])

        crystal_symmetry = miller.crystal.symmetry(
            unit_cell=determine_best_unit_cell(experiments),
            space_group=experiments[0].crystal.get_space_group(),
            assert_is_compatible_unit_cell=False,
        )
        miller_set = miller.set(crystal_symmetry,
                                indices,
                                anomalous_flag=False)
        i_obs = miller.array(miller_set,
                             data=intensities,
                             sigmas=flex.sqrt(variances))
        i_obs.set_observation_type_xray_intensity()
        i_obs.set_info(miller.array_info(source="DIALS", source_type="refl"))

        ms = i_obs.customized_copy()
        batch_array = miller.array(ms, data=batches)

        if params.reference is not None:
            reference, _ = miller_array_from_mtz(params.reference,
                                                 anomalous=params.anomalous,
                                                 labels=params.labels)
        else:
            reference = None

        return cls(i_obs, params, batches=batch_array, reference=reference)
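
The batch-assignment step can be exercised on its own; a sketch assuming experiments and reflection_tables are already loaded as above, and that the helpers live in dials.util.batch_handling:

from dials.array_family import flex
from dials.util.batch_handling import (
    assign_batches_to_reflections,
    calculate_batch_offsets,
)

offsets = calculate_batch_offsets(experiments)  # one offset per experiment
tables = assign_batches_to_reflections(reflection_tables, offsets)
for table in tables:
    print(flex.min(table["batch"]), flex.max(table["batch"]))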
Example #3
def test_filter_reflection_table():
    """Test the interface function"""
    reflections = generate_integrated_test_reflections()
    reflections = filter_reflection_table(reflections, ["sum"])
    assert "intensity.sum.value" in reflections
    assert "intensity.prf.value" not in reflections
    assert "intensity.scale.value" not in reflections
    reflections = generate_integrated_test_reflections()
    reflections = filter_reflection_table(reflections, ["profile"])
    assert "intensity.prf.value" in reflections
    assert "intensity.sum.value" not in reflections
    assert "intensity.scale.value" not in reflections
    reflections = generate_integrated_test_reflections()
    reflections = filter_reflection_table(reflections, ["scale"])
    assert "intensity.scale.value" in reflections
    assert "intensity.prf.value" not in reflections
    assert "intensity.sum.value" not in reflections
    reflections = generate_integrated_test_reflections()
    reflections = filter_reflection_table(reflections, ["sum", "profile"])
    assert "intensity.sum.value" in reflections
    assert "intensity.prf.value" in reflections
    assert "intensity.scale.value" not in reflections
    reflections = generate_integrated_test_reflections()
    reflections = filter_reflection_table(reflections, ["sum", "profile", "scale"])
    assert "intensity.sum.value" in reflections
    assert "intensity.prf.value" in reflections
    assert "intensity.scale.value" in reflections
    with pytest.raises(ValueError):
        reflections = filter_reflection_table(reflections, ["bad"])

    # try filter with profile option and no profiles
    reflections = generate_integrated_test_reflections()
    reflections.unset_flags(
        flex.bool(reflections.size(), True), reflections.flags.integrated_prf
    )
    reflections = filter_reflection_table(reflections, ["sum", "profile"])
    # should try profile but fail and so retry with just sum
    assert "intensity.sum.value" in reflections
    assert "intensity.prf.value" not in reflections

    reflections = generate_integrated_test_reflections()
    reflections.unset_flags(
        flex.bool(reflections.size(), True), reflections.flags.integrated_prf
    )
    reflections = filter_reflection_table(reflections, ["sum", "profile", "scale"])
    # should try profile but fail and so retry with just sum
    assert "intensity.sum.value" in reflections
    assert "intensity.prf.value" not in reflections
    assert "intensity.scale.value" in reflections

    reflections = generate_integrated_test_reflections()
    reflections.unset_flags(
        flex.bool(reflections.size(), True), reflections.flags.integrated_prf
    )
    with pytest.raises(ValueError):
        _ = filter_reflection_table(reflections, ["profile"])
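
The generate_integrated_test_reflections fixture is not shown; a rough sketch of the kind of table such a test needs: the column names and flags are real DIALS conventions, the values are arbitrary, and a real fixture likely needs further columns (e.g. partiality and LP/QE corrections):

from dials.array_family import flex

n = 3
table = flex.reflection_table()
table["intensity.sum.value"] = flex.double(n, 100.0)
table["intensity.sum.variance"] = flex.double(n, 100.0)
table["intensity.prf.value"] = flex.double(n, 100.0)
table["intensity.prf.variance"] = flex.double(n, 100.0)
table["miller_index"] = flex.miller_index(n, (1, 0, 0))
# mark everything as integrated by both summation and profile fitting
table.set_flags(flex.bool(n, True), table.flags.integrated_sum)
table.set_flags(flex.bool(n, True), table.flags.integrated_prf)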
Example #4
    def read_experiments(experiments, reflection_table):
        """
        Get information from experiments and reflections
        """

        # Get space group and unit cell
        space_group = experiments[0].crystal.get_space_group()
        sgno = space_group.type().number()
        unit_cell_list = []
        for e in experiments:
            assert sgno == e.crystal.get_space_group().type().number()
            unit_cell_list.append(e.crystal.get_unit_cell())

        if len(unit_cell_list) > 1:
            # calc mean unit cell
            mean_parameters = [0, 0, 0, 0, 0, 0]
            for uc in unit_cell_list:
                for i in range(6):
                    mean_parameters[i] += uc.parameters()[i]
            for i in range(6):
                mean_parameters[i] /= len(unit_cell_list)
            mean_unit_cell = uctbx.unit_cell(mean_parameters)
        else:
            mean_unit_cell = unit_cell_list[0]

        # Require a dials scaled experiments file.
        filtered_table = filter_reflection_table(reflection_table, ["scale"])
        filtered_table["intensity"] = filtered_table["intensity.scale.value"]
        filtered_table["variance"] = filtered_table["intensity.scale.variance"]
        filtered_table["dataset"] = filtered_table["id"]
        filtered_table["image"] = (
            flex.floor(filtered_table["xyzobs.px.value"].parts()[2]).iround() +
            1)

        return filtered_table, mean_unit_cell, space_group
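
The unit-cell averaging loop can be written more compactly with the same cctbx API; an equivalent sketch (note that DIALS also provides helpers such as determine_best_unit_cell, used in Example #2):

from cctbx import uctbx

def mean_unit_cell(unit_cells):
    # average each of the six cell parameters over a list of unit cells
    all_params = [uc.parameters() for uc in unit_cells]
    mean_params = [sum(p) / len(all_params) for p in zip(*all_params)]
    return uctbx.unit_cell(mean_params)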
Example #5
    def from_dials_data_files(cls, params, experiments, reflection_table):
        """Initialise the class from an experiment list and reflection table.

        Args:
            params: A damage-analysis phil params object
            experiments: An ExperimentList
            reflection_table: A reflection table.
        """
        reflection_table = filter_reflection_table(
            reflection_table, intensity_choice=["scale"], partiality_threshold=0.4
        )

        # get scaled intensities
        intensities = miller.array(
            miller.set(
                crystal.symmetry(
                    unit_cell=median_unit_cell(experiments),
                    space_group=experiments[0].crystal.get_space_group(),
                ),
                indices=reflection_table["miller_index"],
                anomalous_flag=params.anomalous,
            ),
            data=reflection_table["intensity.scale.value"],
            sigmas=flex.sqrt(reflection_table["intensity.scale.variance"]),
        )
        intensities.set_observation_type_xray_intensity()

        doses = flex.double()
        start_doses, doses_per_image = interpret_images_to_doses_options(
            experiments,
            params.dose.experiments.dose_per_image,
            params.dose.experiments.starting_doses,
            params.dose.experiments.shared_crystal,
        )
        logger.info(
            "Interpreting data using:\n  starting_doses=%s\n  dose_per_image=%s",
            ", ".join("%s" % i for i in start_doses)
            if len(set(start_doses)) > 1
            else " all %s" % str(start_doses[0]),
            ", ".join("%s" % i for i in doses_per_image)
            if len(set(doses_per_image)) > 1
            else " all %s" % str(doses_per_image[0]),
        )

        for expt, starting_dose, dose_per_img in zip(
            experiments, start_doses, doses_per_image
        ):
            refls = reflection_table.select(expt)
            imgno = flex.ceil(refls["xyzobs.px.value"].parts()[2])
            dose = (imgno * dose_per_img) + starting_dose
            doses.extend(dose)

        doses = doses.iround()

        return cls(intensities, doses, params)
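
Per reflection the dose reduces to ceil(z) * dose_per_image + starting_dose; a tiny numeric sketch with made-up values:

from dials.array_family import flex

z = flex.double([0.2, 1.7, 2.5])   # calculated frame (image) coordinates
imgno = flex.ceil(z)               # 1.0, 2.0, 3.0
dose = (imgno * 2.0) + 10.0        # dose_per_image=2.0, starting_dose=10.0
print(list(dose.iround()))         # [12, 14, 16]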
Example #6
def export_mtz(integrated_data, experiment_list, params):
    """Export data from integrated_data corresponding to experiment_list to an
    MTZ file hklout."""

    # if mtz filename is auto, then choose scaled.mtz or integrated.mtz
    if params.mtz.hklout in (None, Auto, "auto"):
        if ("intensity.scale.value"
                in integrated_data) and ("intensity.scale.variance"
                                         in integrated_data):
            params.mtz.hklout = "scaled.mtz"
            logger.info(
                "Data appears to be scaled, setting mtz.hklout = 'scaled.mtz'")
        else:
            params.mtz.hklout = "integrated.mtz"
            logger.info(
                "Data appears to be unscaled, setting mtz.hklout = 'integrated.mtz'"
            )

    # First get the experiment identifier information out of the data
    expids_in_table = integrated_data.experiment_identifiers()
    if not list(expids_in_table.keys()):
        reflection_tables = parse_multiple_datasets([integrated_data])
        experiment_list, refl_list = assign_unique_identifiers(
            experiment_list, reflection_tables)
        integrated_data = flex.reflection_table()
        for reflections in refl_list:
            integrated_data.extend(reflections)
        expids_in_table = integrated_data.experiment_identifiers()
    integrated_data.assert_experiment_identifiers_are_consistent(
        experiment_list)
    expids_in_list = list(experiment_list.identifiers())

    # Convert experiment_list to a real python list or else identity assumptions
    # fail like:
    #   assert experiment_list[0] is experiment_list[0]
    # And assumptions about added attributes break
    experiment_list = list(experiment_list)

    # Validate multi-experiment assumptions
    if len(experiment_list) > 1:
        # All experiments should match crystals, or else we need multiple crystals/datasets
        if not all(x.crystal == experiment_list[0].crystal
                   for x in experiment_list[1:]):
            logger.warning(
                "Experiment crystals differ. Using first experiment crystal for file-level data."
            )

        wavelengths = match_wavelengths(experiment_list)
        if len(wavelengths.keys()) > 1:
            logger.info(
                "Multiple wavelengths found: \n%s",
                "\n".join("  Wavlength: %.5f, experiment numbers: %s " %
                          (k, ",".join(map(str, v)))
                          for k, v in wavelengths.items()),
            )
    else:
        wavelengths = OrderedDict(
            {experiment_list[0].beam.get_wavelength(): [0]})

    # this also only works correctly with one panel (for the moment)
    if any(len(experiment.detector) != 1 for experiment in experiment_list):
        logger.warning("Ignoring multiple panels in output MTZ")

    best_unit_cell = params.mtz.best_unit_cell
    if best_unit_cell is None:
        best_unit_cell = determine_best_unit_cell(experiment_list)
    integrated_data["d"] = best_unit_cell.d(integrated_data["miller_index"])

    # Clean up the data with the passed in options
    integrated_data = filter_reflection_table(
        integrated_data,
        intensity_choice=params.intensity,
        partiality_threshold=params.mtz.partiality_threshold,
        combine_partials=params.mtz.combine_partials,
        min_isigi=params.mtz.min_isigi,
        filter_ice_rings=params.mtz.filter_ice_rings,
        d_min=params.mtz.d_min,
    )

    # get batch offsets and image ranges - even for scanless experiments
    batch_offsets = [
        expt.scan.get_batch_offset() for expt in experiment_list
        if expt.scan is not None
    ]
    unique_offsets = set(batch_offsets)
    if len(unique_offsets) <= 1:
        logger.debug("Calculating new batches")
        batch_offsets = calculate_batch_offsets(experiment_list)
        batch_starts = [
            e.scan.get_image_range()[0] if e.scan else 0
            for e in experiment_list
        ]
        effective_offsets = [
            o + s for o, s in zip(batch_offsets, batch_starts)
        ]
        unique_offsets = set(effective_offsets)
    else:
        logger.debug("Keeping existing batches")
    image_ranges = get_image_ranges(experiment_list)
    if len(unique_offsets) != len(batch_offsets):
        raise ValueError("Duplicate batch offsets detected: %s" % ", ".join(
            str(item)
            for item, count in Counter(batch_offsets).items() if count > 1))

    # Create the mtz file
    mtz_writer = UnmergedMTZWriter(
        experiment_list[0].crystal.get_space_group())

    # FIXME TODO for more than one experiment into an MTZ file:
    #
    # - add an epoch (or recover an epoch) from the scan and add this as an extra
    #   column to the MTZ file for scaling, so we know that the two lattices were
    #   integrated at the same time
    # - decide a sensible BATCH increment to apply to the BATCH value between
    #   experiments and add this

    for id_ in expids_in_table.keys():
        # Grab our subset of the data
        loc = expids_in_list.index(
            expids_in_table[id_])  # get strid and use to find loc in list
        experiment = experiment_list[loc]
        if len(list(wavelengths.keys())) > 1:
            for i, (wl, exps) in enumerate(wavelengths.items()):
                if loc in exps:
                    wavelength = wl
                    dataset_id = i + 1
                    break
        else:
            wavelength = list(wavelengths.keys())[0]
            dataset_id = 1
        reflections = integrated_data.select(integrated_data["id"] == id_)
        batch_offset = batch_offsets[loc]
        image_range = image_ranges[loc]
        reflections = assign_batches_to_reflections([reflections],
                                                    [batch_offset])[0]
        experiment.data = dict(reflections)

        s0n = matrix.col(experiment.beam.get_s0()).normalize().elems
        logger.debug("Beam vector: %.4f %.4f %.4f" % s0n)

        mtz_writer.add_batch_list(
            image_range,
            experiment,
            wavelength,
            dataset_id,
            batch_offset=batch_offset,
            force_static_model=params.mtz.force_static_model,
        )

        # Create the batch offset array. This gives us an experiment (id)-dependent
        # batch offset to calculate the correct batch from image number.
        experiment.data["batch_offset"] = flex.int(len(experiment.data["id"]),
                                                   batch_offset)

        # Calculate whether we have a ROT value for this experiment, and set the column
        _, _, z = experiment.data["xyzcal.px"].parts()
        if experiment.scan:
            experiment.data[
                "ROT"] = experiment.scan.get_angle_from_array_index(z)
        else:
            experiment.data["ROT"] = z

    mtz_writer.add_crystal(
        crystal_name=params.mtz.crystal_name,
        project_name=params.mtz.project_name,
        unit_cell=best_unit_cell,
    )
    # Note: add unit cell here as may have changed basis since creating mtz.
    # For multi-wave unmerged mtz, we add an empty dataset for each wavelength,
    # but only write the data into the final dataset (for unmerged the batches
    # link the unmerged data to the individual wavelengths).
    for wavelength in wavelengths:
        mtz_writer.add_empty_dataset(wavelength)

    # Combine all of the experiment data columns before writing
    combined_data = {
        k: v.deep_copy()
        for k, v in experiment_list[0].data.items()
    }
    for experiment in experiment_list[1:]:
        for k, v in experiment.data.items():
            combined_data[k].extend(v)
    # ALL columns must be the same length
    assert len({len(v)
                for v in combined_data.values()
                }) == 1, "Column length mismatch"
    assert len(combined_data["id"]) == len(
        integrated_data["id"]), "Lost rows in split/combine"

    # Write all the data and columns to the mtz file
    mtz_writer.write_columns(combined_data)

    logger.info("Saving {} integrated reflections to {}".format(
        len(combined_data["id"]), params.mtz.hklout))
    mtz_file = mtz_writer.mtz_file
    mtz_file.write(params.mtz.hklout)

    return mtz_file
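
A sketch of driving export_mtz programmatically; the phil-scope import path is an assumption about dials.export internals, and the file names are hypothetical:

from dxtbx.model import ExperimentList
from dials.array_family import flex
from dials.command_line.export import phil_scope  # assumed import path

params = phil_scope.extract()
params.intensity = ["scale"]  # assumed to be a list-valued choice
experiments = ExperimentList.from_file("scaled.expt")
reflections = flex.reflection_table.from_file("scaled.refl")
mtz_file = export_mtz(reflections, experiments, params)  # hklout=Auto gives scaled.mtz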
Example #7
def merge(
    experiments,
    reflections,
    d_min=None,
    d_max=None,
    combine_partials=True,
    partiality_threshold=0.4,
    best_unit_cell=None,
    anomalous=True,
    use_internal_variance=False,
    assess_space_group=False,
    n_bins=20,
):
    """
    Merge reflection table data and generate a summary of the merging statistics.

    This procedure filters the input data, merges the data (normal and optionally
    anomalous), assesses the space group symmetry and generates a summary
    of the merging statistics.
    """

    logger.info("\nMerging scaled reflection data\n")
    # first filter bad reflections using dials.util.filter methods
    reflections = filter_reflection_table(
        reflections,
        intensity_choice=["scale"],
        d_min=d_min,
        d_max=d_max,
        combine_partials=combine_partials,
        partiality_threshold=partiality_threshold,
    )
    # ^ scale factor has been applied, so now set to 1.0 - okay as not
    # going to output scale factor in merged mtz.
    reflections["inverse_scale_factor"] = flex.double(reflections.size(), 1.0)

    scaled_array = scaled_data_as_miller_array([reflections], experiments,
                                               best_unit_cell)
    # Note, merge_equivalents does not raise an error if data is unique.
    merged = scaled_array.merge_equivalents(
        use_internal_variance=use_internal_variance).array()
    merged_anom = None

    if anomalous:
        anomalous_scaled = scaled_array.as_anomalous_array()
        merged_anom = anomalous_scaled.merge_equivalents(
            use_internal_variance=use_internal_variance).array()

    # Assess the space group symmetry using the merged intensities
    if assess_space_group:
        merged_reflections = flex.reflection_table()
        merged_reflections["intensity"] = merged.data()
        merged_reflections["variance"] = flex.pow2(merged.sigmas())
        merged_reflections["miller_index"] = merged.indices()
        logger.info("Running systematic absences check")
        run_systematic_absences_checks(experiments, merged_reflections)

    try:
        stats, anom_stats = merging_stats_from_scaled_array(
            scaled_array,
            n_bins,
            use_internal_variance,
        )
    except DialsMergingStatisticsError as e:
        logger.error(e, exc_info=True)
        stats_summary = None
    else:
        stats_summary = make_merging_statistics_summary(stats)
        stats_summary += table_1_summary(stats, anom_stats)

    return merged, merged_anom, stats_summary
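
A usage sketch for merge, assuming the function above is in scope and hypothetical scaled DIALS output files:

from dxtbx.model import ExperimentList
from dials.array_family import flex

experiments = ExperimentList.from_file("scaled.expt")
reflections = flex.reflection_table.from_file("scaled.refl")
merged, merged_anom, stats_summary = merge(
    experiments, reflections, d_min=1.8, anomalous=True, n_bins=10)
if stats_summary:
    print(stats_summary)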
Example #8
def run_sys_abs_checks(experiments, reflections, d_min=None, significance_level=0.95):
    """Check for systematic absences in the data for the laue group.

    Select the good data, merge, test screw axes and score possible space
    groups. The crystals are updated with the most likely space group.
    """

    if (
        "inverse_scale_factor" in reflections[0]
        and "intensity.scale.value" in reflections[0]
    ):
        logger.info("Attempting to perform absence checks on scaled data")
        reflections = filter_reflection_table(
            reflections[0], intensity_choice=["scale"], d_min=d_min
        )
        reflections["intensity"] = reflections["intensity.scale.value"]
        reflections["variance"] = reflections["intensity.scale.variance"]
    else:
        logger.info(
            "Attempting to perform absence checks on unscaled profile-integrated data"
        )
        reflections = filter_reflection_table(
            reflections[0], intensity_choice=["profile"], d_min=d_min
        )
        reflections["intensity"] = reflections["intensity.prf.value"]
        reflections["variance"] = reflections["intensity.prf.variance"]

    # now merge
    space_group = experiments[0].crystal.get_space_group()
    reflections["asu_miller_index"] = map_indices_to_asu(
        reflections["miller_index"], space_group
    )
    reflections["inverse_scale_factor"] = flex.double(reflections.size(), 1.0)
    merged = (
        _reflection_table_to_iobs(
            reflections, experiments[0].crystal.get_unit_cell(), space_group
        )
        .merge_equivalents(use_internal_variance=False)
        .array()
    )
    merged_reflections = flex.reflection_table()
    merged_reflections["intensity"] = merged.data()
    merged_reflections["variance"] = merged.sigmas() ** 2
    merged_reflections["miller_index"] = merged.indices()

    # Get the laue class from the space group.
    laue_group = str(space_group.build_derived_patterson_group().info())
    logger.info("Laue group: %s", laue_group)
    if laue_group not in laue_groups:
        logger.info("No absences to check for this laue group")
        return

    # Score the screw axes.
    screw_axes, screw_axis_scores = score_screw_axes(
        laue_groups[laue_group], merged_reflections, significance_level
    )

    logger.info(
        simple_table(
            [
                [
                    a.name,
                    "%.3f" % score,
                    str(a.n_refl_used[0]),
                    str(a.n_refl_used[1]),
                    "%.3f" % a.mean_I,
                    "%.3f" % a.mean_I_abs,
                    "%.3f" % a.mean_I_sigma,
                    "%.3f" % a.mean_I_sigma_abs,
                ]
                for a, score in zip(screw_axes, screw_axis_scores)
            ],
            column_headers=[
                "Screw axis",
                "Score",
                "No. present",
                "No. absent",
                "<I> present",
                "<I> absent",
                "<I/sig> present",
                "<I/sig> absent",
            ],
        ).format()
    )

    # Score the space groups from the screw axis scores.
    space_groups, scores = score_space_groups(
        screw_axis_scores, laue_groups[laue_group]
    )

    logger.info(
        simple_table(
            [[sg, "%.4f" % score] for sg, score in zip(space_groups, scores)],
            column_headers=["Space group", "score"],
        ).format()
    )

    # Find the best space group and update the experiments.
    best_sg = space_groups[scores.index(max(scores))]
    logger.info("Recommended space group: %s", best_sg)
    if "enantiomorphic pairs" in laue_groups[laue_group]:
        if best_sg in laue_groups[laue_group]["enantiomorphic pairs"]:
            logger.info(
                "Space group with equivalent score (enantiomorphic pair): %s",
                laue_groups[laue_group]["enantiomorphic pairs"][best_sg],
            )

    new_sg = sgtbx.space_group_info(symbol=best_sg).group()
    for experiment in experiments:
        experiment.crystal.set_space_group(new_sg)
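
The Laue-group key used for the lookup comes from cctbx's derived Patterson group; a small self-contained check:

from cctbx import sgtbx

sg = sgtbx.space_group_info(symbol="P 21 21 21").group()
laue_group = str(sg.build_derived_patterson_group().info())
print(laue_group)  # "P m m m" - the key used to pick the screw axes to test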
Example #9
    def write(self, experiments, reflections):
        """
        Write the experiments and reflections to file
        """

        # if mmcif filename is auto, then choose scaled.cif or integrated.cif
        if self.params.mmcif.hklout in (None, Auto, "auto"):
            if ("intensity.scale.value"
                    in reflections) and ("intensity.scale.variance"
                                         in reflections):
                filename = "scaled.cif"
                logger.info(
                    "Data appears to be scaled, setting mmcif.hklout = 'scaled.cif'"
                )
            else:
                filename = "integrated.cif"
                logger.info(
                    "Data appears to be unscaled, setting mmcif.hklout = 'integrated.cif'"
                )
        else:
            filename = self.params.mmcif.hklout

        # Select reflections
        selection = reflections.get_flags(reflections.flags.integrated,
                                          all=True)
        reflections = reflections.select(selection)

        # Filter out bad variances and other issues, but don't filter on ice rings
        # or alter partialities.

        ### Assumes you want to apply the lp and dqe corrections to sum and prf
        ### Do we want to combine partials?
        reflections = filter_reflection_table(
            reflections,
            self.params.intensity,
            combine_partials=False,
            partiality_threshold=0.0,
            d_min=self.params.mtz.d_min,
        )

        # Get the cif block
        cif_block = iotbx.cif.model.block()

        # Audit trail
        dials_version = dials.util.version.dials_version()
        cif_block["_audit.creation_method"] = dials_version
        cif_block["_audit.creation_date"] = datetime.date.today().isoformat()
        cif_block["_computing.data_reduction"] = (
            "%s (Winter, G. et al., 2018)" % dials_version)
        cif_block[
            "_publ.section_references"] = "Winter, G. et al. (2018) Acta Cryst. D74, 85-97."

        # Hard coding X-ray
        cif_block["_pdbx_diffrn_data_section.id"] = "dials"
        cif_block["_pdbx_diffrn_data_section.type_scattering"] = "x-ray"
        cif_block["_pdbx_diffrn_data_section.type_merged"] = "false"
        cif_block["_pdbx_diffrn_data_section.type_scaled"] = str(
            "scale" in self.params.intensity).lower()

        # FIXME Haven't put in any of these bits yet
        #
        #  Facility/beamline proposal tracking details
        #
        # cif_block["_pdbx_diffrn_data_section_experiment.ordinal"] = 1
        # cif_block["_pdbx_diffrn_data_section_experiment.data_section_id"] = "dials"
        # cif_block["_pdbx_diffrn_data_section_experiment.proposal_id"] = "<PROPOSAL ID>

        # Facility/beamline details for this data collection
        #
        # cif_block["_pdbx_diffrn_data_section_site.data_section_id"] = 'dials'
        # cif_block["_pdbx_diffrn_data_section_site.facility"] = "DIAMOND"
        # cif_block["_pdbx_diffrn_data_section_site.beamline"] = "VMX-M"
        # cif_block["_pdbx_diffrn_data_section_site.collection_date"] = scan.epochs()[0]
        # cif_block["_pdbx_diffrn_data_section_site.detector"] = detector[0].name()
        # cif_block["_pdbx_diffrn_data_section_site.detector_type"] = detector[0].type()

        # Write the crystal information
        cif_loop = iotbx.cif.model.loop(header=(
            "_pdbx_diffrn_unmerged_cell.ordinal",
            "_pdbx_diffrn_unmerged_cell.crystal_id",
            "_pdbx_diffrn_unmerged_cell.wavelength",
            "_pdbx_diffrn_unmerged_cell.cell_length_a",
            "_pdbx_diffrn_unmerged_cell.cell_length_b",
            "_pdbx_diffrn_unmerged_cell.cell_length_c",
            "_pdbx_diffrn_unmerged_cell.cell_angle_alpha",
            "_pdbx_diffrn_unmerged_cell.cell_angle_beta",
            "_pdbx_diffrn_unmerged_cell.cell_angle_gamma",
            "_pdbx_diffrn_unmerged_cell.Bravais_lattice",
        ))
        crystals = experiments.crystals()
        crystal_to_id = {crystal: i + 1 for i, crystal in enumerate(crystals)}
        for i, exp in enumerate(experiments):
            crystal = exp.crystal
            crystal_id = crystal_to_id[crystal]
            wavelength = exp.beam.get_wavelength()
            a, b, c, alpha, beta, gamma = crystal.get_unit_cell().parameters()
            latt_type = str(
                bravais_types.bravais_lattice(group=crystal.get_space_group()))
            cif_loop.add_row((i + 1, crystal_id, wavelength, a, b, c, alpha,
                              beta, gamma, latt_type))
        cif_block.add_loop(cif_loop)

        # Write the scan information
        cif_loop = iotbx.cif.model.loop(header=(
            "_pdbx_diffrn_scan.scan_id",
            "_pdbx_diffrn_scan.crystal_id",
            "_pdbx_diffrn_scan.image_id_begin",
            "_pdbx_diffrn_scan.image_id_end",
            "_pdbx_diffrn_scan.scan_angle_begin",
            "_pdbx_diffrn_scan.scan_angle_end",
        ))
        for i, exp in enumerate(experiments):
            scan = exp.scan
            crystal_id = crystal_to_id[exp.crystal]
            image_range = scan.get_image_range()
            osc_range = scan.get_oscillation_range(deg=True)
            cif_loop.add_row((
                i + 1,
                crystal_id,
                image_range[0],
                image_range[1],
                osc_range[0],
                osc_range[1],
            ))
        cif_block.add_loop(cif_loop)

        # Make a dict of unit_cell parameters
        unit_cell_parameters = {}
        if crystal.num_scan_points > 1:
            for i in range(crystal.num_scan_points):
                a, b, c, alpha, beta, gamma = crystal.get_unit_cell_at_scan_point(
                    i).parameters()
                unit_cell_parameters[i] = (a, b, c, alpha, beta, gamma)
        else:
            unit_cell_parameters[0] = (a, b, c, alpha, beta, gamma)

        ### _pdbx_diffrn_image_proc has been removed from the dictionary extension.
        ### Keeping this section commented out as it may be added back in some
        ### form in future
        #
        # Write the image data
        # scan = experiments[0].scan
        # z0 = scan.get_image_range()[0]
        #
        # cif_loop = iotbx.cif.model.loop(
        #  header=("_pdbx_diffrn_image_proc.image_id",
        #          "_pdbx_diffrn_image_proc.crystal_id",
        #          "_pdbx_diffrn_image_proc.image_number",
        #          "_pdbx_diffrn_image_proc.phi_value",
        #          "_pdbx_diffrn_image_proc.wavelength",
        #          "_pdbx_diffrn_image_proc.cell_length_a",
        #          "_pdbx_diffrn_image_proc.cell_length_b",
        #          "_pdbx_diffrn_image_proc.cell_length_c",
        #          "_pdbx_diffrn_image_proc.cell_angle_alpha",
        #          "_pdbx_diffrn_image_proc.cell_angle_beta",
        #          "_pdbx_diffrn_image_proc.cell_angle_gamma"))
        # for i in range(len(scan)):
        #  z = z0 + i
        #  if crystal.num_scan_points > 1:
        #    a, b, c, alpha, beta, gamma = unit_cell_parameters[i]
        #  else:
        #    a, b, c, alpha, beta, gamma = unit_cell_parameters[0]
        #  # phi is the angle at the image centre
        #  phi = scan.get_angle_from_image_index(z + 0.5, deg=True)
        #  cif_loop.add_row((i+1, 1, z, phi, wavelength,
        #                    a, b, c, alpha, beta, gamma))
        # cif_block.add_loop(cif_loop)

        # Write reflection data
        # Required columns
        header = (
            "_pdbx_diffrn_unmerged_refln.reflection_id",
            "_pdbx_diffrn_unmerged_refln.scan_id",
            "_pdbx_diffrn_unmerged_refln.image_id_begin",
            "_pdbx_diffrn_unmerged_refln.image_id_end",
            "_pdbx_diffrn_unmerged_refln.index_h",
            "_pdbx_diffrn_unmerged_refln.index_k",
            "_pdbx_diffrn_unmerged_refln.index_l",
        )

        headernames = {
            "scales": "_pdbx_diffrn_unmerged_refln.scale_value",
            "intensity.scale.value":
            "_pdbx_diffrn_unmerged_refln.intensity_meas",
            "intensity.scale.sigma":
            "_pdbx_diffrn_unmerged_refln.intensity_sigma",
            "intensity.sum.value": "_pdbx_diffrn_unmerged_refln.intensity_sum",
            "intensity.sum.sigma":
            "_pdbx_diffrn_unmerged_refln.intensity_sum_sigma",
            "intensity.prf.value": "_pdbx_diffrn_unmerged_refln.intensity_prf",
            "intensity.prf.sigma":
            "_pdbx_diffrn_unmerged_refln.intensity_prf_sigma",
            "angle": "_pdbx_diffrn_unmerged_refln.scan_angle_reflection",
            "partiality": "_pdbx_diffrn_unmerged_refln.partiality",
        }

        variables_present = []
        if "scale" in self.params.intensity:
            reflections["scales"] = 1.0 / reflections["inverse_scale_factor"]
            reflections["intensity.scale.sigma"] = (
                reflections["intensity.scale.variance"]**0.5)
            variables_present.extend(
                ["scales", "intensity.scale.value", "intensity.scale.sigma"])
        if "sum" in self.params.intensity:
            reflections["intensity.sum.sigma"] = (
                reflections["intensity.sum.variance"]**0.5)
            variables_present.extend(
                ["intensity.sum.value", "intensity.sum.sigma"])
        if "profile" in self.params.intensity:
            reflections["intensity.prf.sigma"] = (
                reflections["intensity.prf.variance"]**0.5)
            variables_present.extend(
                ["intensity.prf.value", "intensity.prf.sigma"])

        # Should always exist
        reflections["angle"] = reflections["xyzcal.mm"].parts()[2] * RAD2DEG
        variables_present.extend(["angle"])

        if "partiality" in reflections:
            variables_present.extend(["partiality"])

        for name in variables_present:
            if name in reflections:
                header += (headernames[name], )

        if "scale" in self.params.intensity:
            # Write dataset_statistics - first make a miller array
            crystal_symmetry = cctbxcrystal.symmetry(
                space_group=experiments[0].crystal.get_space_group(),
                unit_cell=experiments[0].crystal.get_unit_cell(),
            )
            miller_set = miller.set(
                crystal_symmetry=crystal_symmetry,
                indices=reflections["miller_index"],
                anomalous_flag=False,
            )
            i_obs = miller.array(miller_set,
                                 data=reflections["intensity.scale.value"])
            i_obs.set_observation_type_xray_intensity()
            i_obs.set_sigmas(reflections["intensity.scale.sigma"])
            i_obs.set_info(
                miller.array_info(source="DIALS",
                                  source_type="reflection_tables"))

            result = dataset_statistics(
                i_obs=i_obs,
                crystal_symmetry=crystal_symmetry,
                use_internal_variance=False,
                eliminate_sys_absent=False,
            )

            cif_block.update(result.as_cif_block())

        cif_loop = iotbx.cif.model.loop(header=header)

        for i, r in enumerate(reflections.rows()):
            refl_id = i + 1
            scan_id = r["id"] + 1
            _, _, _, _, z0, z1 = r["bbox"]
            h, k, l = r["miller_index"]
            variable_values = tuple((r[name]) for name in variables_present)
            cif_loop.add_row((refl_id, scan_id, z0, z1, h, k, l) +
                             variable_values)
        cif_block.add_loop(cif_loop)

        # Add the block
        self._cif["dials"] = cif_block

        # Print to file
        with open(filename, "w") as fh:
            self._cif.show(out=fh)

        # Log
        logger.info("Wrote reflections to %s" % filename)
Example #10
    def run(self):
        """Execute the script."""
        start_time = time()

        # Parse the command line
        params, _ = self.parser.parse_args(show_diff_phil=False)

        # set up global reflections list
        reflections = flex.reflection_table()

        # loop through the input, building up the global lists
        reflections_list = flatten_reflections(params.input.reflections)
        experiments = flatten_experiments(params.input.experiments)

        reflections_list = parse_multiple_datasets(reflections_list)
        for refs in reflections_list:
            reflections.extend(refs)

        # Try to load the models and data
        nexp = len(experiments)
        if nexp == 0:
            print("No Experiments found in the input")
            self.parser.print_help()
            return
        if not reflections:
            print("No reflection data found in the input")
            self.parser.print_help()
            return

        self.check_input(reflections)

        # Configure the logging
        log.config(logfile=params.output.log)
        logger.info(dials_version())

        # Log the diff phil
        diff_phil = self.parser.diff_phil.as_str()
        if diff_phil != "":
            logger.info("The following parameters have been modified:\n")
            logger.info(diff_phil)

        # Convert to P 1?
        if params.refinement.triclinic:
            reflections, experiments = self.convert_to_P1(reflections, experiments)

        # Combine crystals?
        if params.refinement.combine_crystal_models and len(experiments) > 1:
            logger.info("Combining {0} crystal models".format(len(experiments)))
            experiments = self.combine_crystals(experiments)

        # Filter integrated centroids?
        if params.refinement.filter_integrated_centroids:
            reflections = self.filter_integrated_centroids(reflections)

        # Filter data if scaled to remove outliers
        if "inverse_scale_factor" in reflections:
            try:
                reflections = filter_reflection_table(reflections, ["scale"])
            except ValueError as e:
                logger.warning(e)
                logger.info(
                    "Filtering on scaled data failed, proceeding with integrated data."
                )

        # Get the refiner
        logger.info("Configuring refiner")
        refiner = self.create_refiner(params, reflections, experiments)

        # Refine the geometry
        if nexp == 1:
            logger.info("Performing refinement of a single Experiment...")
        else:
            logger.info("Performing refinement of {} Experiments...".format(nexp))
        refiner.run()

        # get the refined experiments
        experiments = refiner.get_experiments()
        crystals = experiments.crystals()

        if len(crystals) == 1:
            # output the refined model for information
            logger.info("")
            logger.info("Final refined crystal model:")
            logger.info(crystals[0])
            logger.info(self.cell_param_table(crystals[0]))

        # Save the refined experiments to file
        output_experiments_filename = params.output.experiments
        logger.info(
            "Saving refined experiments to {}".format(output_experiments_filename)
        )
        experiments.as_file(output_experiments_filename)

        # Create correlation plots
        if params.output.correlation_plot.filename is not None:
            create_correlation_plots(refiner, params.output)

        if params.output.cif is not None:
            self.generate_cif(crystals[0], refiner, filename=params.output.cif)

        if params.output.p4p is not None:
            self.generate_p4p(
                crystals[0], experiments[0].beam, filename=params.output.p4p
            )

        if params.output.mmcif is not None:
            self.generate_mmcif(crystals[0], refiner, filename=params.output.mmcif)

        # Log the total time taken
        logger.info("\nTotal time taken: {:.2f}s".format(time() - start_time))
Example #11
def export_xds_ascii(integrated_data,
                     experiment_list,
                     params,
                     var_model=(1, 0)):
    """Export data from integrated_data corresponding to experiment_list to
    an XDS_ASCII.HKL formatted text file."""

    from dials.array_family import flex

    # for the moment assume (and assert) that we will convert data from exactly
    # one lattice...

    assert len(experiment_list) == 1
    # select reflections that are assigned to an experiment (i.e. non-negative id)

    integrated_data = integrated_data.select(integrated_data["id"] >= 0)
    assert max(integrated_data["id"]) == 0

    # export for xds_ascii should only be for non-scaled reflections
    assert any(i in integrated_data
               for i in ["intensity.sum.value", "intensity.prf.value"])

    integrated_data = filter_reflection_table(
        integrated_data,
        intensity_choice=params.intensity,
        partiality_threshold=params.mtz.partiality_threshold,
        combine_partials=params.mtz.combine_partials,
        min_isigi=params.mtz.min_isigi,
        filter_ice_rings=params.mtz.filter_ice_rings,
        d_min=params.mtz.d_min,
    )

    # calculate the scl = lp/dqe correction for outputting but don't apply it as
    # it has already been applied in filter_reflection_table
    integrated_data, scl = FilteringReductionMethods.calculate_lp_qe_correction_and_filter(
        integrated_data)

    experiment = experiment_list[0]

    # sort data before output
    nref = len(integrated_data["miller_index"])
    indices = flex.size_t_range(nref)

    import copy

    unique = copy.deepcopy(integrated_data["miller_index"])
    from cctbx.miller import map_to_asu

    map_to_asu(experiment.crystal.get_space_group().type(), False, unique)

    perm = sorted(indices, key=lambda k: unique[k])
    integrated_data = integrated_data.select(flex.size_t(perm))

    from scitbx import matrix
    from rstbx.cftbx.coordinate_frame_helpers import align_reference_frame

    assert experiment.goniometer is not None

    unit_cell = experiment.crystal.get_unit_cell()

    from scitbx.array_family import flex

    assert experiment.scan is not None
    image_range = experiment.scan.get_image_range()
    phi_start, phi_range = experiment.scan.get_image_oscillation(
        image_range[0])

    # gather the required information for the reflection file

    nref = len(integrated_data["miller_index"])
    zdet = flex.double(integrated_data["xyzcal.px"].parts()[2])

    miller_index = integrated_data["miller_index"]

    # profile correlation
    if "profile.correlation" in integrated_data:
        prof_corr = 100.0 * integrated_data["profile.correlation"]
    else:
        prof_corr = flex.double(nref, 100.0)

    # partiality
    if "partiality" in integrated_data:
        partiality = 100 * integrated_data["partiality"]
    else:
        partiality = flex.double(nref, 100.0)

    if "intensity.sum.value" in integrated_data:
        I = integrated_data["intensity.sum.value"]
        V = integrated_data["intensity.sum.variance"]
        assert V.all_gt(0)
        V = var_model[0] * (V + var_model[1] * I * I)
        sigI = flex.sqrt(V)
    else:
        I = integrated_data["intensity.prf.value"]
        V = integrated_data["intensity.prf.variance"]
        assert V.all_gt(0)
        V = var_model[0] * (V + var_model[1] * I * I)
        sigI = flex.sqrt(V)

    fout = open(params.xds_ascii.hklout, "w")

    # first write the header - in the "standard" coordinate frame...

    panel = experiment.detector[0]
    fast = panel.get_fast_axis()
    slow = panel.get_slow_axis()
    Rd = align_reference_frame(fast, (1, 0, 0), slow, (0, 1, 0))
    print("Coordinate change:")
    print("%5.2f %5.2f %5.2f\n%5.2f %5.2f %5.2f\n%5.2f %5.2f %5.2f\n" %
          Rd.elems)

    fast = Rd * fast
    slow = Rd * slow

    qx, qy = panel.get_pixel_size()
    nx, ny = panel.get_image_size()
    distance = matrix.col(Rd * panel.get_origin()).dot(
        matrix.col(Rd * panel.get_normal()))
    org = Rd * (matrix.col(panel.get_origin()) -
                distance * matrix.col(panel.get_normal()))
    orgx = -org.dot(fast) / qx
    orgy = -org.dot(slow) / qy

    UB = Rd * matrix.sqr(experiment.crystal.get_A())
    real_space_ABC = UB.inverse().elems

    axis = Rd * experiment.goniometer.get_rotation_axis()
    beam = Rd * experiment.beam.get_s0()
    cell_fmt = "%9.3f %9.3f %9.3f %7.3f %7.3f %7.3f"
    axis_fmt = "%9.3f %9.3f %9.3f"

    fout.write("\n".join([
        "!FORMAT=XDS_ASCII    MERGE=FALSE    FRIEDEL'S_LAW=TRUE",
        "!Generated by dials.export",
        "!DATA_RANGE= %d %d" % image_range,
        "!ROTATION_AXIS= %9.6f %9.6f %9.6f" % axis.elems,
        "!OSCILLATION_RANGE= %f" % phi_range,
        "!STARTING_ANGLE= %f" % phi_start,
        "!STARTING_FRAME= %d" % image_range[0],
        "!SPACE_GROUP_NUMBER= %d" %
        experiment.crystal.get_space_group().type().number(),
        "!UNIT_CELL_CONSTANTS= %s" % (cell_fmt % unit_cell.parameters()),
        "!UNIT_CELL_A-AXIS= %s" % (axis_fmt % real_space_ABC[0:3]),
        "!UNIT_CELL_B-AXIS= %s" % (axis_fmt % real_space_ABC[3:6]),
        "!UNIT_CELL_C-AXIS= %s" % (axis_fmt % real_space_ABC[6:9]),
        "!X-RAY_WAVELENGTH= %f" % experiment.beam.get_wavelength(),
        "!INCIDENT_BEAM_DIRECTION= %f %f %f" % beam.elems,
        "!NX= %d NY= %d QX= %f QY= %f" % (nx, ny, qx, qy),
        "!ORGX= %9.2f ORGY= %9.2f" % (orgx, orgy),
        "!DETECTOR_DISTANCE= %8.3f" % distance,
        "!DIRECTION_OF_DETECTOR_X-AXIS= %9.5f %9.5f %9.5f" % fast.elems,
        "!DIRECTION_OF_DETECTOR_Y-AXIS= %9.5f %9.5f %9.5f" % slow.elems,
        "!VARIANCE_MODEL= %7.3e %7.3e" % var_model,
        "!NUMBER_OF_ITEMS_IN_EACH_DATA_RECORD=12",
        "!ITEM_H=1",
        "!ITEM_K=2",
        "!ITEM_L=3",
        "!ITEM_IOBS=4",
        "!ITEM_SIGMA(IOBS)=5",
        "!ITEM_XD=6",
        "!ITEM_YD=7",
        "!ITEM_ZD=8",
        "!ITEM_RLP=9",
        "!ITEM_PEAK=10",
        "!ITEM_CORR=11",
        "!ITEM_PSI=12",
        "!END_OF_HEADER",
        "",
    ]))

    # then write the data records

    s0 = Rd * matrix.col(experiment.beam.get_s0())

    for j in range(nref):
        x, y, z = integrated_data["xyzcal.px"][j]
        phi = phi_start + z * phi_range
        h, k, l = miller_index[j]
        X = (UB * (h, k, l)).rotate(axis, phi, deg=True)
        s = s0 + X
        g = s.cross(s0).normalize()
        f = (s - s0).normalize()

        # find component of beam perpendicular to f, e
        e = -(s + s0).normalize()
        if h == k and k == l:
            u = (h, -h, 0)
        else:
            u = (k - l, l - h, h - k)
        q = ((matrix.col(u).transpose() *
              UB.inverse()).normalize().transpose().rotate(axis, phi,
                                                           deg=True))

        psi = q.angle(g, deg=True)
        if q.dot(e) < 0:
            psi *= -1

        fout.write("%d %d %d %f %f %f %f %f %f %.1f %.1f %f\n" % (
            h,
            k,
            l,
            I[j],
            sigI[j],
            x,
            y,
            z,
            scl[j],
            partiality[j],
            prof_corr[j],
            psi,
        ))

    fout.write("!END_OF_DATA\n")
    fout.close()
    logger.info("Output %d reflections to %s" %
                (nref, params.xds_ascii.hklout))
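
The ASU sort performed before output can be reproduced on a toy set of indices; map_to_asu mutates the index array in place:

from cctbx import sgtbx
from cctbx.miller import map_to_asu
from dials.array_family import flex

indices = flex.miller_index([(-1, 0, 0), (1, 0, 0), (0, -2, 0)])
map_to_asu(sgtbx.space_group_info("P 2 2 2").group().type(), False, indices)
print(list(indices))  # every index mapped into the P222 asymmetric unit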
Example #12
def _export_experiment(filename,
                       integrated_data,
                       experiment,
                       params,
                       var_model=(1, 0)):
    # type: (str, flex.reflection_table, dxtbx.model.Experiment, libtbx.phil.scope_extract, Tuple)
    """Export a single experiment to an XDS_ASCII.HKL format file.

    Args:
        filename: The file to write to
        integrated_data: The reflection table, pre-selected to one experiment
        experiment: The experiment list entry to export
        params: The PHIL configuration object
        var_model:
    """
    # export for xds_ascii should only be for non-scaled reflections
    assert any(i in integrated_data
               for i in ["intensity.sum.value", "intensity.prf.value"])
    # Handle requesting profile intensities (default via auto) but no column
    if "profile" in params.intensity and "intensity.prf.value" not in integrated_data:
        raise Sorry(
            "Requested profile intensity data but only summed present. Use intensity=sum."
        )

    integrated_data = filter_reflection_table(
        integrated_data,
        intensity_choice=params.intensity,
        partiality_threshold=params.mtz.partiality_threshold,
        combine_partials=params.mtz.combine_partials,
        min_isigi=params.mtz.min_isigi,
        filter_ice_rings=params.mtz.filter_ice_rings,
        d_min=params.mtz.d_min,
    )

    # calculate the scl = lp/dqe correction for outputting but don't apply it as
    # it has already been applied in filter_reflection_table
    (
        integrated_data,
        scl,
    ) = FilteringReductionMethods.calculate_lp_qe_correction_and_filter(
        integrated_data)

    # sort data before output
    nref = len(integrated_data["miller_index"])
    indices = flex.size_t_range(nref)

    unique = copy.deepcopy(integrated_data["miller_index"])

    map_to_asu(experiment.crystal.get_space_group().type(), False, unique)

    perm = sorted(indices, key=lambda k: unique[k])
    integrated_data = integrated_data.select(flex.size_t(perm))

    if experiment.goniometer is None:
        print(
            "Warning: No goniometer. Experimentally exporting with (1 0 0) axis"
        )

    unit_cell = experiment.crystal.get_unit_cell()

    if experiment.scan is None:
        print(
            "Warning: No Scan. Experimentally exporting no-oscillation values")
        image_range = (1, 1)
        phi_start, phi_range = 0.0, 0.0
    else:
        image_range = experiment.scan.get_image_range()
        phi_start, phi_range = experiment.scan.get_image_oscillation(
            image_range[0])

    # gather the required information for the reflection file

    nref = len(integrated_data["miller_index"])

    miller_index = integrated_data["miller_index"]

    # profile correlation
    if "profile.correlation" in integrated_data:
        prof_corr = 100.0 * integrated_data["profile.correlation"]
    else:
        prof_corr = flex.double(nref, 100.0)

    # partiality
    if "partiality" in integrated_data:
        partiality = 100 * integrated_data["partiality"]
    else:
        partiality = flex.double(nref, 100.0)

    if "intensity.sum.value" in integrated_data:
        I = integrated_data["intensity.sum.value"]
        V = integrated_data["intensity.sum.variance"]
        assert V.all_gt(0)
        V = var_model[0] * (V + var_model[1] * I * I)
        sigI = flex.sqrt(V)
    else:
        I = integrated_data["intensity.prf.value"]
        V = integrated_data["intensity.prf.variance"]
        assert V.all_gt(0)
        V = var_model[0] * (V + var_model[1] * I * I)
        sigI = flex.sqrt(V)

    fout = open(filename, "w")

    # first write the header - in the "standard" coordinate frame...

    panel = experiment.detector[0]
    fast = panel.get_fast_axis()
    slow = panel.get_slow_axis()
    Rd = align_reference_frame(fast, (1, 0, 0), slow, (0, 1, 0))
    print("Coordinate change:")
    print("%5.2f %5.2f %5.2f\n%5.2f %5.2f %5.2f\n%5.2f %5.2f %5.2f\n" %
          Rd.elems)

    fast = Rd * fast
    slow = Rd * slow

    qx, qy = panel.get_pixel_size()
    nx, ny = panel.get_image_size()
    distance = matrix.col(Rd * panel.get_origin()).dot(
        matrix.col(Rd * panel.get_normal()))
    org = Rd * (matrix.col(panel.get_origin()) -
                distance * matrix.col(panel.get_normal()))
    orgx = -org.dot(fast) / qx
    orgy = -org.dot(slow) / qy

    UB = Rd * matrix.sqr(experiment.crystal.get_A())
    real_space_ABC = UB.inverse().elems

    if experiment.goniometer is not None:
        axis = Rd * experiment.goniometer.get_rotation_axis()
    else:
        axis = Rd * (1, 0, 0)

    beam = Rd * experiment.beam.get_s0()
    cell_fmt = "%9.3f %9.3f %9.3f %7.3f %7.3f %7.3f"
    axis_fmt = "%9.3f %9.3f %9.3f"

    fout.write("\n".join([
        "!FORMAT=XDS_ASCII    MERGE=FALSE    FRIEDEL'S_LAW=TRUE",
        "!Generated by dials.export",
        "!DATA_RANGE= %d %d" % image_range,
        "!ROTATION_AXIS= %9.6f %9.6f %9.6f" % axis.elems,
        "!OSCILLATION_RANGE= %f" % phi_range,
        "!STARTING_ANGLE= %f" % phi_start,
        "!STARTING_FRAME= %d" % image_range[0],
        "!SPACE_GROUP_NUMBER= %d" %
        experiment.crystal.get_space_group().type().number(),
        "!UNIT_CELL_CONSTANTS= %s" % (cell_fmt % unit_cell.parameters()),
        "!UNIT_CELL_A-AXIS= %s" % (axis_fmt % real_space_ABC[0:3]),
        "!UNIT_CELL_B-AXIS= %s" % (axis_fmt % real_space_ABC[3:6]),
        "!UNIT_CELL_C-AXIS= %s" % (axis_fmt % real_space_ABC[6:9]),
        "!X-RAY_WAVELENGTH= %f" % experiment.beam.get_wavelength(),
        "!INCIDENT_BEAM_DIRECTION= %f %f %f" % beam.elems,
        "!NX= %d NY= %d QX= %f QY= %f" % (nx, ny, qx, qy),
        "!ORGX= %9.2f ORGY= %9.2f" % (orgx, orgy),
        "!DETECTOR_DISTANCE= %8.3f" % distance,
        "!DIRECTION_OF_DETECTOR_X-AXIS= %9.5f %9.5f %9.5f" % fast.elems,
        "!DIRECTION_OF_DETECTOR_Y-AXIS= %9.5f %9.5f %9.5f" % slow.elems,
        "!VARIANCE_MODEL= %7.3e %7.3e" % var_model,
        "!NUMBER_OF_ITEMS_IN_EACH_DATA_RECORD=12",
        "!ITEM_H=1",
        "!ITEM_K=2",
        "!ITEM_L=3",
        "!ITEM_IOBS=4",
        "!ITEM_SIGMA(IOBS)=5",
        "!ITEM_XD=6",
        "!ITEM_YD=7",
        "!ITEM_ZD=8",
        "!ITEM_RLP=9",
        "!ITEM_PEAK=10",
        "!ITEM_CORR=11",
        "!ITEM_PSI=12",
        "!END_OF_HEADER",
        "",
    ]))

    # then write the data records

    s0 = Rd * matrix.col(experiment.beam.get_s0())

    for j in range(nref):
        x, y, z = integrated_data["xyzcal.px"][j]
        phi = phi_start + z * phi_range
        h, k, l = miller_index[j]
        X = (UB * (h, k, l)).rotate(axis, phi, deg=True)
        s = s0 + X
        g = s.cross(s0).normalize()

        # find component of beam perpendicular to f, e
        e = -(s + s0).normalize()
        if h == k and k == l:
            u = (h, -h, 0)
        else:
            u = (k - l, l - h, h - k)
        q = ((matrix.col(u).transpose() *
              UB.inverse()).normalize().transpose().rotate(axis, phi,
                                                           deg=True))

        psi = q.angle(g, deg=True)
        if q.dot(e) < 0:
            psi *= -1

        fout.write("%d %d %d %f %f %f %f %f %f %.1f %.1f %f\n" % (
            h,
            k,
            l,
            I[j],
            sigI[j],
            x,
            y,
            z,
            scl[j],
            partiality[j],
            prof_corr[j],
            psi,
        ))

    fout.write("!END_OF_DATA\n")
    fout.close()
    logger.info("Output %d reflections to %s" % (nref, filename))
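
# As a quick aside, a tiny self-contained check of the reference-vector choice
# in the psi calculation above: u = (k - l, l - h, h - k) is always
# perpendicular to (h, k, l), as is (h, -h, 0) in the special case
# h == k == l. The indices below are arbitrary illustrative values.

def _psi_reference_vector(h, k, l):
    """Return the reference direction from which psi is measured."""
    if h == k and k == l:
        return (h, -h, 0)
    return (k - l, l - h, h - k)

for hkl in [(1, 2, 3), (2, 2, 2), (0, 1, -1)]:
    u = _psi_reference_vector(*hkl)
    assert sum(a * b for a, b in zip(hkl, u)) == 0
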
Example #13
def merge_and_truncate(params, experiments, reflections):
    """Filter the data, assess the space group, and run French-Wilson truncation and Wilson statistics."""

    logger.info("\nMerging scaled reflection data\n")
    # first filter bad reflections using dials.util.filter methods
    reflections = filter_reflection_table(
        reflections[0],
        intensity_choice=["scale"],
        d_min=params.d_min,
        combine_partials=params.combine_partials,
        partiality_threshold=params.partiality_threshold,
    )
    # ^ the scale factor has already been applied, so set it to 1.0 - this is
    # fine, as the scale factor is not output in the merged mtz.
    reflections["inverse_scale_factor"] = flex.double(reflections.size(), 1.0)

    scaled_array = scaled_data_as_miller_array([reflections], experiments)
    if params.anomalous:
        anomalous_scaled = scaled_array.as_anomalous_array()

    merged = scaled_array.merge_equivalents(
        use_internal_variance=params.merging.use_internal_variance).array()
    merged_anom = None
    if params.anomalous:
        merged_anom = anomalous_scaled.merge_equivalents(
            use_internal_variance=params.merging.use_internal_variance).array(
            )

    # Assess the space group via systematic-absence checks on the merged data
    if params.assess_space_group:
        merged_reflections = flex.reflection_table()
        merged_reflections["intensity"] = merged.data()
        merged_reflections["variance"] = merged.sigmas()**2
        merged_reflections["miller_index"] = merged.indices()
        logger.info("Running systematic absences check")
        run_sys_abs_checks(experiments, merged_reflections)

    # Run the truncation statistics on anomalous or non-anomalous data?
    if params.anomalous:
        intensities = merged_anom
    else:
        intensities = merged

    assert intensities.is_xray_intensity_array()
    amplitudes = None
    anom_amplitudes = None
    if params.truncate:
        logger.info("\nScaling input intensities via the French-Wilson method")
        out = StringIO()
        if params.anomalous:
            anom_amplitudes = intensities.french_wilson(params=params, log=out)
            n_removed = intensities.size() - anom_amplitudes.size()
            assert anom_amplitudes.is_xray_amplitude_array()
            amplitudes = anom_amplitudes.as_non_anomalous_array()
            amplitudes = amplitudes.merge_equivalents().array()
        else:
            amplitudes = intensities.french_wilson(params=params, log=out)
            n_removed = intensities.size() - amplitudes.size()
        logger.info("Total number of rejected intensities %s", n_removed)
        logger.debug(out.getvalue())

    if params.reporting.wilson_stats:
        if not intensities.space_group().is_centric():
            wilson_scaling = data_statistics.wilson_scaling(
                miller_array=intensities,
                n_residues=params.n_residues)  # XXX default n_residues?
            # Divert output through the logger - use StringIO rather than
            # info_handle, otherwise we get far too much whitespace in the output.
            out = StringIO()
            wilson_scaling.show(out=out)
            logger.info(out.getvalue())

    # Apply wilson B to give absolute scale?

    # Show merging stats again.
    if params.reporting.merging_stats:
        stats, anom_stats = merging_stats_from_scaled_array(
            scaled_array, params.merging.n_bins,
            params.merging.use_internal_variance)
        if params.merging.anomalous:
            logger.info(make_merging_statistics_summary(anom_stats))
        else:
            logger.info(make_merging_statistics_summary(stats))

    return merged, merged_anom, amplitudes, anom_amplitudes
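
# A hedged sketch of the repackaging used for the systematic-absence check
# above: any merged cctbx miller array carrying sigmas (such as the output of
# merge_equivalents(...).array()) is rewrapped as a dials reflection table.
from dials.array_family import flex

def merged_array_to_reflection_table(merged):
    """Rewrap a merged miller array as a dials reflection table."""
    table = flex.reflection_table()
    table["intensity"] = merged.data()
    table["variance"] = merged.sigmas()**2
    table["miller_index"] = merged.indices()
    return table
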
Example #14
    def write(self, experiments, reflections):
        """
        Write the experiments and reflections to file
        """

        # if mmcif filename is auto, then choose scaled.cif or integrated.cif
        if self.params.mmcif.hklout in (None, Auto, "auto"):
            if ("intensity.scale.value"
                    in reflections) and ("intensity.scale.variance"
                                         in reflections):
                filename = "scaled.cif"
                logger.info(
                    "Data appears to be scaled, setting mmcif.hklout = 'scaled.cif'"
                )
            else:
                filename = "integrated.cif"
                logger.info(
                    "Data appears to be unscaled, setting mmcif.hklout = 'integrated.cif'"
                )
        else:
            filename = self.params.mmcif.hklout

        # Select reflections
        selection = reflections.get_flags(reflections.flags.integrated,
                                          all=True)
        reflections = reflections.select(selection)

        # Filter out bad variances and other issues, but don't filter on ice rings
        # or alter partialities.

        # Assumes you want to apply the lp and dqe corrections to sum and prf
        # Do we want to combine partials?
        reflections = filter_reflection_table(
            reflections,
            self.params.intensity,
            combine_partials=False,
            partiality_threshold=0.0,
            d_min=self.params.mtz.d_min,
        )

        # Get the cif block
        cif_block = iotbx.cif.model.block()

        # Audit trail
        dials_version = dials.util.version.dials_version()
        cif_block["_audit.creation_method"] = dials_version
        cif_block["_audit.creation_date"] = datetime.date.today().isoformat()
        cif_block["_computing.data_reduction"] = (
            "%s (Winter, G. et al., 2018)" % dials_version)
        cif_block[
            "_publ.section_references"] = "Winter, G. et al. (2018) Acta Cryst. D74, 85-97."
        if "scale" in self.params.intensity:
            cif_block[
                "_publ.section_references"] += "\nBeilsten-Edmands, J. et al. (2020) Acta Cryst. D76, 385-399."

        # Hard-coding X-ray
        cif_block["_pdbx_diffrn_data_section.id"] = "dials"
        cif_block["_pdbx_diffrn_data_section.type_scattering"] = "x-ray"
        cif_block["_pdbx_diffrn_data_section.type_merged"] = "false"
        cif_block["_pdbx_diffrn_data_section.type_scaled"] = str(
            "scale" in self.params.intensity).lower()

        # FIXME finish metadata addition - detector and source details needed
        # http://mmcif.wwpdb.org/dictionaries/mmcif_pdbx_v50.dic/Categories/index.html

        # Add source information;
        # _diffrn_source.pdbx_wavelength_list = (list of wavelengths)
        # _diffrn_source.source = (general class of source e.g. synchrotron)
        # _diffrn_source.type = (specific beamline or instrument e.g. DIAMOND BEAMLINE I04)

        wls = []
        epochs = []
        for exp in experiments:
            wls.append(round(exp.beam.get_wavelength(), 5))
            epochs.append(exp.scan.get_epochs()[0])
        unique_wls = set(wls)
        cif_block["_diffrn_source.pdbx_wavelength_list"] = ", ".join(
            str(w) for w in unique_wls)

        # Add detector information;
        # _diffrn_detector.detector  = (general class e.g. PIXEL, PLATE etc)
        # _diffrn_detector.pdbx_collection_date = (Date of collection yyyy-mm-dd)
        # _diffrn_detector.type = (full name of detector e.g. DECTRIS PILATUS3 2M)
        # One date is required, so if there are several, use the earliest.
        min_epoch = min(epochs)
        date_str = time.strftime("%Y-%m-%d", time.gmtime(min_epoch))
        cif_block["_diffrn_detector.pdbx_collection_date"] = date_str

        # Write the crystal information
        cif_loop = iotbx.cif.model.loop(header=(
            "_pdbx_diffrn_unmerged_cell.ordinal",
            "_pdbx_diffrn_unmerged_cell.crystal_id",
            "_pdbx_diffrn_unmerged_cell.wavelength",
            "_pdbx_diffrn_unmerged_cell.cell_length_a",
            "_pdbx_diffrn_unmerged_cell.cell_length_b",
            "_pdbx_diffrn_unmerged_cell.cell_length_c",
            "_pdbx_diffrn_unmerged_cell.cell_angle_alpha",
            "_pdbx_diffrn_unmerged_cell.cell_angle_beta",
            "_pdbx_diffrn_unmerged_cell.cell_angle_gamma",
            "_pdbx_diffrn_unmerged_cell.Bravais_lattice",
        ))
        crystals = experiments.crystals()
        crystal_to_id = {crystal: i + 1 for i, crystal in enumerate(crystals)}
        for i, exp in enumerate(experiments):
            crystal = exp.crystal
            crystal_id = crystal_to_id[crystal]
            wavelength = exp.beam.get_wavelength()
            a, b, c, alpha, beta, gamma = crystal.get_unit_cell().parameters()
            latt_type = str(
                bravais_types.bravais_lattice(group=crystal.get_space_group()))
            cif_loop.add_row((i + 1, crystal_id, wavelength, a, b, c, alpha,
                              beta, gamma, latt_type))
        cif_block.add_loop(cif_loop)

        # Write the scan information
        cif_loop = iotbx.cif.model.loop(header=(
            "_pdbx_diffrn_scan.scan_id",
            "_pdbx_diffrn_scan.crystal_id",
            "_pdbx_diffrn_scan.image_id_begin",
            "_pdbx_diffrn_scan.image_id_end",
            "_pdbx_diffrn_scan.scan_angle_begin",
            "_pdbx_diffrn_scan.scan_angle_end",
        ))
        for i, exp in enumerate(experiments):
            scan = exp.scan
            crystal_id = crystal_to_id[exp.crystal]
            image_range = scan.get_image_range()
            osc_range = scan.get_oscillation_range(deg=True)
            cif_loop.add_row((
                i + 1,
                crystal_id,
                image_range[0],
                image_range[1],
                osc_range[0],
                osc_range[1],
            ))
        cif_block.add_loop(cif_loop)

        # Make a dict of unit_cell parameters
        unit_cell_parameters = {}
        if crystal.num_scan_points > 1:
            for i in range(crystal.num_scan_points):
                a, b, c, alpha, beta, gamma = crystal.get_unit_cell_at_scan_point(
                    i).parameters()
                unit_cell_parameters[i] = (a, b, c, alpha, beta, gamma)
        else:
            unit_cell_parameters[0] = (a, b, c, alpha, beta, gamma)

        ### _pdbx_diffrn_image_proc has been removed from the dictionary extension.
        ### Keeping this section commented out as it may be added back in some
        ### form in future
        #
        # Write the image data
        # scan = experiments[0].scan
        # z0 = scan.get_image_range()[0]
        #
        # cif_loop = iotbx.cif.model.loop(
        #  header=("_pdbx_diffrn_image_proc.image_id",
        #          "_pdbx_diffrn_image_proc.crystal_id",
        #          "_pdbx_diffrn_image_proc.image_number",
        #          "_pdbx_diffrn_image_proc.phi_value",
        #          "_pdbx_diffrn_image_proc.wavelength",
        #          "_pdbx_diffrn_image_proc.cell_length_a",
        #          "_pdbx_diffrn_image_proc.cell_length_b",
        #          "_pdbx_diffrn_image_proc.cell_length_c",
        #          "_pdbx_diffrn_image_proc.cell_angle_alpha",
        #          "_pdbx_diffrn_image_proc.cell_angle_beta",
        #          "_pdbx_diffrn_image_proc.cell_angle_gamma"))
        # for i in range(len(scan)):
        #  z = z0 + i
        #  if crystal.num_scan_points > 1:
        #    a, b, c, alpha, beta, gamma = unit_cell_parameters[i]
        #  else:
        #    a, b, c, alpha, beta, gamma = unit_cell_parameters[0]
        #  # phi is the angle at the image centre
        #  phi = scan.get_angle_from_image_index(z + 0.5, deg=True)
        #  cif_loop.add_row((i+1, 1, z, phi, wavelength,
        #                    a, b, c, alpha, beta, gamma))
        # cif_block.add_loop(cif_loop)

        # Write reflection data
        # Required columns
        header = (
            "_pdbx_diffrn_unmerged_refln.reflection_id",
            "_pdbx_diffrn_unmerged_refln.scan_id",
            "_pdbx_diffrn_unmerged_refln.image_id_begin",
            "_pdbx_diffrn_unmerged_refln.image_id_end",
            "_pdbx_diffrn_unmerged_refln.index_h",
            "_pdbx_diffrn_unmerged_refln.index_k",
            "_pdbx_diffrn_unmerged_refln.index_l",
        )

        headernames = {
            "scales": "_pdbx_diffrn_unmerged_refln.scale_value",
            "intensity.scale.value":
            "_pdbx_diffrn_unmerged_refln.intensity_meas",
            "intensity.scale.sigma":
            "_pdbx_diffrn_unmerged_refln.intensity_sigma",
            "intensity.sum.value": "_pdbx_diffrn_unmerged_refln.intensity_sum",
            "intensity.sum.sigma":
            "_pdbx_diffrn_unmerged_refln.intensity_sum_sigma",
            "intensity.prf.value": "_pdbx_diffrn_unmerged_refln.intensity_prf",
            "intensity.prf.sigma":
            "_pdbx_diffrn_unmerged_refln.intensity_prf_sigma",
            "angle": "_pdbx_diffrn_unmerged_refln.scan_angle_reflection",
            "partiality": "_pdbx_diffrn_unmerged_refln.partiality",
        }

        variables_present = []
        if "scale" in self.params.intensity:
            reflections["scales"] = 1.0 / reflections["inverse_scale_factor"]
            reflections["intensity.scale.sigma"] = flex.sqrt(
                reflections["intensity.scale.variance"])
            variables_present.extend(
                ["scales", "intensity.scale.value", "intensity.scale.sigma"])
        if "sum" in self.params.intensity:
            reflections["intensity.sum.sigma"] = flex.sqrt(
                reflections["intensity.sum.variance"])
            variables_present.extend(
                ["intensity.sum.value", "intensity.sum.sigma"])
        if "profile" in self.params.intensity:
            reflections["intensity.prf.sigma"] = flex.sqrt(
                reflections["intensity.prf.variance"])
            variables_present.extend(
                ["intensity.prf.value", "intensity.prf.sigma"])

        # Should always exist
        reflections["angle"] = reflections["xyzcal.mm"].parts()[2] * RAD2DEG
        variables_present.extend(["angle"])

        if "partiality" in reflections:
            variables_present.extend(["partiality"])

        for name in variables_present:
            if name in reflections:
                header += (headernames[name], )

        if "scale" in self.params.intensity:
            # Write dataset_statistics - first make a miller array
            crystal_symmetry = cctbxcrystal.symmetry(
                space_group=experiments[0].crystal.get_space_group(),
                unit_cell=experiments[0].crystal.get_unit_cell(),
            )
            miller_set = miller.set(
                crystal_symmetry=crystal_symmetry,
                indices=reflections["miller_index"],
                anomalous_flag=False,
            )
            i_obs = miller.array(miller_set,
                                 data=reflections["intensity.scale.value"])
            i_obs.set_observation_type_xray_intensity()
            i_obs.set_sigmas(reflections["intensity.scale.sigma"])
            i_obs.set_info(
                miller.array_info(source="DIALS",
                                  source_type="reflection_tables"))

            result = dataset_statistics(
                i_obs=i_obs,
                crystal_symmetry=crystal_symmetry,
                use_internal_variance=False,
                eliminate_sys_absent=False,
            )

            cif_block.update(result.as_cif_block())

        cif_loop = iotbx.cif.model.loop(header=header)

        for i, r in enumerate(reflections.rows()):
            refl_id = i + 1
            scan_id = r["id"] + 1
            _, _, _, _, z0, z1 = r["bbox"]
            h, k, l = r["miller_index"]
            variable_values = tuple((r[name]) for name in variables_present)
            cif_loop.add_row((refl_id, scan_id, z0, z1, h, k, l) +
                             variable_values)
        cif_block.add_loop(cif_loop)

        # Add the block
        self._cif["dials"] = cif_block

        # Print to file
        with open(filename, "w") as fh:
            self._cif.show(out=fh)

        # Log
        logger.info("Wrote reflections to %s" % filename)
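
# A minimal sketch of the iotbx.cif pattern used throughout write() above:
# build a block, attach key-value items and a loop, register the block in a
# cif document and print it. The "_demo" category and item names are
# illustrative placeholders, not entries in any PDB dictionary.
import sys

import iotbx.cif.model

cif = iotbx.cif.model.cif()
block = iotbx.cif.model.block()
block["_demo.creation_method"] = "example"
loop = iotbx.cif.model.loop(header=("_demo.id", "_demo.value"))
loop.add_row((1, 42.0))
block.add_loop(loop)
cif["demo"] = block
cif.show(out=sys.stdout)
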
Example #15
    def make_cif_block(self, experiments, reflections):
        """Write the data to a cif block"""
        # Select reflections
        selection = reflections.get_flags(reflections.flags.integrated, all=True)
        reflections = reflections.select(selection)

        # Filter out bad variances and other issues, but don't filter on ice rings
        # or alter partialities.

        # Assumes you want to apply the lp and dqe corrections to sum and prf
        # Do we want to combine partials?
        reflections = filter_reflection_table(
            reflections,
            self.params.intensity,
            combine_partials=False,
            partiality_threshold=0.0,
            d_min=self.params.mtz.d_min,
        )

        # Get the cif block
        cif_block = iotbx.cif.model.block()

        # Audit trail
        dials_version = dials.util.version.dials_version()
        cif_block["_audit.revision_id"] = 1
        cif_block["_audit.creation_method"] = dials_version
        cif_block["_audit.creation_date"] = datetime.date.today().isoformat()
        cif_block["_entry.id"] = "DIALS"
        # add software loop
        mmcif_software_header = (
            "_software.pdbx_ordinal",
            "_software.citation_id",
            "_software.name",  # as defined at [1]
            "_software.version",
            "_software.type",
            "_software.classification",
            "_software.description",
        )

        mmcif_citations_header = (
            "_citation.id",
            "_citation.journal_abbrev",
            "_citation.journal_volume",
            "_citation.journal_issue",
            "_citation.page_first",
            "_citation.page_last",
            "_citation.year",
            "_citation.title",
        )

        software_loop = iotbx.cif.model.loop(header=mmcif_software_header)
        citations_loop = iotbx.cif.model.loop(header=mmcif_citations_header)

        software_loop.add_row(
            (
                1,
                1,
                "DIALS",
                dials_version,
                "package",
                "data processing",
                "Data processing and integration within the DIALS software package",
            )
        )
        citations_loop.add_row(
            (
                1,
                "Acta Cryst. D",
                74,
                2,
                85,
                97,
                2018,
                "DIALS: implementation and evaluation of a new integration package",
            )
        )
        if "scale" in self.params.intensity:
            software_loop.add_row(
                (
                    2,
                    2,
                    "DIALS",
                    dials_version,
                    "program",
                    "data scaling",
                    "Data scaling and merging within the DIALS software package",
                )
            )
            citations_loop.add_row(
                (
                    2,
                    "Acta Cryst. D",
                    76,
                    4,
                    385,
                    399,
                    2020,
                    "Scaling diffraction data in the DIALS software package: algorithms and new approaches for multi-crystal scaling",
                )
            )
        cif_block.add_loop(software_loop)
        cif_block.add_loop(citations_loop)

        # Hard-coding X-ray
        if self.params.mmcif.pdb_version == "v5_next":
            cif_block["_pdbx_diffrn_data_section.id"] = "dials"
            cif_block["_pdbx_diffrn_data_section.type_scattering"] = "x-ray"
            cif_block["_pdbx_diffrn_data_section.type_merged"] = "false"
            cif_block["_pdbx_diffrn_data_section.type_scaled"] = str(
                "scale" in self.params.intensity
            ).lower()

        # FIXME finish metadata addition - detector and source details needed
        # http://mmcif.wwpdb.org/dictionaries/mmcif_pdbx_v50.dic/Categories/index.html

        # Add source information;
        # _diffrn_source.pdbx_wavelength_list = (list of wavelengths)
        # _diffrn_source.source = (general class of source e.g. synchrotron)
        # _diffrn_source.type = (specific beamline or instrument e.g. DIAMOND BEAMLINE I04)

        wls = []
        epochs = []
        for exp in experiments:
            wls.append(round(exp.beam.get_wavelength(), 5))
            epochs.append(exp.scan.get_epochs()[0])
        unique_wls = set(wls)
        cif_block["_exptl_crystal.id"] = 1  # links to crystal_id
        cif_block["_diffrn.id"] = 1  # links to diffrn_id
        cif_block["_diffrn.crystal_id"] = 1
        cif_block["_diffrn_source.diffrn_id"] = 1
        cif_block["_diffrn_source.pdbx_wavelength_list"] = ", ".join(
            str(w) for w in unique_wls
        )

        # Add detector information;
        # _diffrn_detector.detector  = (general class e.g. PIXEL, PLATE etc)
        # _diffrn_detector.pdbx_collection_date = (Date of collection yyyy-mm-dd)
        # _diffrn_detector.type = (full name of detector e.g. DECTRIS PILATUS3 2M)
        # One date is required, so if multiple just use the first date.
        min_epoch = min(epochs)
        date_str = time.strftime("%Y-%m-%d", time.gmtime(min_epoch))
        cif_block["_diffrn_detector.diffrn_id"] = 1
        cif_block["_diffrn_detector.pdbx_collection_date"] = date_str

        # Write reflection data
        # Required columns
        header = (
            "_pdbx_diffrn_unmerged_refln.reflection_id",
            "_pdbx_diffrn_unmerged_refln.scan_id",
            "_pdbx_diffrn_unmerged_refln.image_id_begin",
            "_pdbx_diffrn_unmerged_refln.image_id_end",
            "_pdbx_diffrn_unmerged_refln.index_h",
            "_pdbx_diffrn_unmerged_refln.index_k",
            "_pdbx_diffrn_unmerged_refln.index_l",
        )

        extra_items = {
            "scales": ("_pdbx_diffrn_unmerged_refln.scale_value", "%5.3f"),
            "intensity.scale.value": (
                "_pdbx_diffrn_unmerged_refln.intensity_meas",
                "%8.3f",
            ),
            "intensity.scale.sigma": (
                "_pdbx_diffrn_unmerged_refln.intensity_sigma",
                "%8.3f",
            ),
            "intensity.sum.value": (
                "_pdbx_diffrn_unmerged_refln.intensity_sum",
                "%8.3f",
            ),
            "intensity.sum.sigma": (
                "_pdbx_diffrn_unmerged_refln.intensity_sum_sigma",
                "%8.3f",
            ),
            "intensity.prf.value": (
                "_pdbx_diffrn_unmerged_refln.intensity_prf",
                "%8.3f",
            ),
            "intensity.prf.sigma": (
                "_pdbx_diffrn_unmerged_refln.intensity_prf_sigma",
                "%8.3f",
            ),
            "angle": ("_pdbx_diffrn_unmerged_refln.scan_angle_reflection", "%7.4f"),
            "partiality": ("_pdbx_diffrn_unmerged_refln.partiality", "%7.4f"),
        }

        variables_present = []
        if "scale" in self.params.intensity:
            reflections["scales"] = 1.0 / reflections["inverse_scale_factor"]
            reflections["intensity.scale.sigma"] = flex.sqrt(
                reflections["intensity.scale.variance"]
            )
            variables_present.extend(
                ["scales", "intensity.scale.value", "intensity.scale.sigma"]
            )
        if "sum" in self.params.intensity:
            reflections["intensity.sum.sigma"] = flex.sqrt(
                reflections["intensity.sum.variance"]
            )
            variables_present.extend(["intensity.sum.value", "intensity.sum.sigma"])
        if "profile" in self.params.intensity:
            reflections["intensity.prf.sigma"] = flex.sqrt(
                reflections["intensity.prf.variance"]
            )
            variables_present.extend(["intensity.prf.value", "intensity.prf.sigma"])

        # Should always exist
        reflections["angle"] = reflections["xyzcal.mm"].parts()[2] * RAD2DEG
        variables_present.extend(["angle"])

        if "partiality" in reflections:
            variables_present.extend(["partiality"])

        for name in variables_present:
            if name in reflections:
                header += (extra_items[name][0],)
                self._fmt += " " + extra_items[name][1]

        if "scale" in self.params.intensity:
            # Write dataset_statistics - first make a miller array
            crystal_symmetry = cctbxcrystal.symmetry(
                space_group=experiments[0].crystal.get_space_group(),
                unit_cell=experiments[0].crystal.get_unit_cell(),
            )
            miller_set = miller.set(
                crystal_symmetry=crystal_symmetry,
                indices=reflections["miller_index"],
                anomalous_flag=False,
            )
            i_obs = miller.array(miller_set, data=reflections["intensity.scale.value"])
            i_obs.set_observation_type_xray_intensity()
            i_obs.set_sigmas(reflections["intensity.scale.sigma"])
            i_obs.set_info(
                miller.array_info(source="DIALS", source_type="reflection_tables")
            )

            result = dataset_statistics(
                i_obs=i_obs,
                crystal_symmetry=crystal_symmetry,
                use_internal_variance=False,
                eliminate_sys_absent=False,
            )
            merged_block = iotbx.cif.model.block()
            merged_block["_reflns.pdbx_ordinal"] = 1
            merged_block["_reflns.pdbx_diffrn_id"] = 1
            merged_block["_reflns.entry_id"] = "DIALS"
            merged_data = result.as_cif_block()
            merged_block.update(merged_data)
            cif_block.update(merged_block)

        # Write the crystal information
        # if v5, that's all, so return
        if self.params.mmcif.pdb_version == "v5":
            return cif_block
        # continue if v5_next
        cif_loop = iotbx.cif.model.loop(
            header=(
                "_pdbx_diffrn_unmerged_cell.ordinal",
                "_pdbx_diffrn_unmerged_cell.crystal_id",
                "_pdbx_diffrn_unmerged_cell.wavelength",
                "_pdbx_diffrn_unmerged_cell.cell_length_a",
                "_pdbx_diffrn_unmerged_cell.cell_length_b",
                "_pdbx_diffrn_unmerged_cell.cell_length_c",
                "_pdbx_diffrn_unmerged_cell.cell_angle_alpha",
                "_pdbx_diffrn_unmerged_cell.cell_angle_beta",
                "_pdbx_diffrn_unmerged_cell.cell_angle_gamma",
                "_pdbx_diffrn_unmerged_cell.Bravais_lattice",
            )
        )
        crystals = experiments.crystals()
        crystal_to_id = {crystal: i + 1 for i, crystal in enumerate(crystals)}
        for i, exp in enumerate(experiments):
            crystal = exp.crystal
            crystal_id = crystal_to_id[crystal]
            wavelength = exp.beam.get_wavelength()
            a, b, c, alpha, beta, gamma = crystal.get_unit_cell().parameters()
            latt_type = str(
                bravais_types.bravais_lattice(group=crystal.get_space_group())
            )
            cif_loop.add_row(
                (i + 1, crystal_id, wavelength, a, b, c, alpha, beta, gamma, latt_type)
            )
        cif_block.add_loop(cif_loop)

        # Write the scan information
        cif_loop = iotbx.cif.model.loop(
            header=(
                "_pdbx_diffrn_scan.scan_id",
                "_pdbx_diffrn_scan.crystal_id",
                "_pdbx_diffrn_scan.image_id_begin",
                "_pdbx_diffrn_scan.image_id_end",
                "_pdbx_diffrn_scan.scan_angle_begin",
                "_pdbx_diffrn_scan.scan_angle_end",
            )
        )

        expid_to_scan_id = {exp.identifier: i + 1 for i, exp in enumerate(experiments)}

        for i, exp in enumerate(experiments):
            scan = exp.scan
            crystal_id = crystal_to_id[exp.crystal]
            image_range = scan.get_image_range()
            osc_range = scan.get_oscillation_range(deg=True)
            cif_loop.add_row(
                (
                    i + 1,
                    crystal_id,
                    image_range[0],
                    image_range[1],
                    osc_range[0],
                    osc_range[1],
                )
            )
        cif_block.add_loop(cif_loop)

        _, _, _, _, z0, z1 = reflections["bbox"].parts()
        h, k, l = [
            hkl.iround() for hkl in reflections["miller_index"].as_vec3_double().parts()
        ]
        # make scan id consistent with header as defined above
        scan_id = flex.int(reflections.size(), 0)
        for id_ in reflections.experiment_identifiers().keys():
            expid = reflections.experiment_identifiers()[id_]
            sel = reflections["id"] == id_
            scan_id.set_selected(sel, expid_to_scan_id[expid])

        loop_values = [
            flex.size_t_range(1, len(reflections) + 1),
            scan_id,
            z0,
            z1,
            h,
            k,
            l,
        ] + [reflections[name] for name in variables_present]
        cif_loop = iotbx.cif.model.loop(data=dict(zip(header, loop_values)))
        cif_block.add_loop(cif_loop)

        return cif_block
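
# A short sketch of the column-wise loop construction used at the end of
# make_cif_block() above: iotbx.cif.model.loop also accepts data=, a mapping
# of item name to data column, which avoids a Python-level add_row() call per
# reflection. The "_demo" item names and columns are illustrative only.
import iotbx.cif.model

from dials.array_family import flex

header = ("_demo.id", "_demo.h")
columns = [flex.size_t_range(1, 4), flex.int([1, 0, 2])]
loop = iotbx.cif.model.loop(data=dict(zip(header, columns)))
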
Example #16
def export_sadabs(integrated_data, experiment_list, params):
    """Export data from integrated_data corresponding to experiment_list to a
    file for input to SADABS. FIXME probably need to make a .p4p file as
    well..."""

    from dials.array_family import flex

    # for the moment assume (and assert) that we will convert data from exactly
    # one lattice...

    assert len(experiment_list) == 1
    # select reflections that are assigned to an experiment (i.e. non-negative id)

    integrated_data = integrated_data.select(integrated_data["id"] >= 0)
    assert max(integrated_data["id"]) == 0

    # export for sadabs should only be for non-scaled reflections
    assert any(i in integrated_data
               for i in ["intensity.sum.value", "intensity.prf.value"])

    integrated_data = filter_reflection_table(
        integrated_data,
        intensity_choice=params.intensity,
        partiality_threshold=params.mtz.partiality_threshold,
        combine_partials=params.mtz.combine_partials,
        min_isigi=params.mtz.min_isigi,
        filter_ice_rings=params.mtz.filter_ice_rings,
        d_min=params.mtz.d_min,
    )

    experiment = experiment_list[0]
    assert experiment.scan is not None

    # sort data before output
    nref = len(integrated_data["miller_index"])
    indices = flex.size_t_range(nref)
    perm = sorted(indices, key=lambda k: integrated_data["miller_index"][k])
    integrated_data = integrated_data.select(flex.size_t(perm))

    assert experiment.goniometer is not None

    # Warn of unhelpful SADABS behaviour for certain multi-sequence data sets
    hkl_file_root, _ = os.path.splitext(params.sadabs.hklout)
    if not params.sadabs.run or re.search("_0+$", hkl_file_root):
        logger.warning(
            "It seems SADABS rejects multi-sequence data when the first "
            "filename ends "
            "'_0', '_00', etc., with a cryptic error message:\n"
            "\t'Inconsistent 2theta values in same scan'.\n"
            "You may need to begin the numbering of your SADABS HKL files from 1, "
            "rather than 0, and ensure the SADABS run/batch number is greater than 0."
        )

    axis = matrix.col(experiment.goniometer.get_rotation_axis_datum())

    beam = matrix.col(experiment.beam.get_sample_to_source_direction())
    s0 = matrix.col(experiment.beam.get_s0())

    F = matrix.sqr(experiment.goniometer.get_fixed_rotation())
    S = matrix.sqr(experiment.goniometer.get_setting_rotation())
    unit_cell = experiment.crystal.get_unit_cell()

    if params.debug:
        m_format = "%6.3f%6.3f%6.3f\n%6.3f%6.3f%6.3f\n%6.3f%6.3f%6.3f"
        c_format = "%.2f %.2f %.2f %.2f %.2f %.2f"

        logger.info(
            "Unit cell parameters from experiment: %s",
            c_format % unit_cell.parameters(),
        )
        logger.info(
            "Symmetry: %s",
            experiment.crystal.get_space_group().type().lookup_symbol())

        logger.info("Goniometer fixed matrix:\n%s", m_format % F.elems)
        logger.info("Goniometer setting matrix:\n%s", m_format % S.elems)
        logger.info("Goniometer scan axis:\n%6.3f%6.3f%6.3f", *axis.elems)

    # detector scaling info
    assert len(experiment.detector) == 1
    panel = experiment.detector[0]
    dims = panel.get_image_size()
    pixel = panel.get_pixel_size()
    fast_axis = matrix.col(panel.get_fast_axis())
    slow_axis = matrix.col(panel.get_slow_axis())
    normal = fast_axis.cross(slow_axis)
    detector2t = s0.angle(normal, deg=True)

    if params.debug:
        logger.info("Detector fast, slow axes:")
        logger.info("%6.3f%6.3f%6.3f", *fast_axis.elems)
        logger.info("%6.3f%6.3f%6.3f", *slow_axis.elems)
        logger.info("Detector two theta (degrees): %.2f", detector2t)

    scl_x = 512.0 / (dims[0] * pixel[0])
    scl_y = 512.0 / (dims[1] * pixel[1])

    image_range = experiment.scan.get_image_range()

    from cctbx.array_family import flex as cflex  # implicit import # noqa: F401
    from cctbx.miller import map_to_asu_isym  # implicit import # noqa: F401

    # gather the required information for the reflection file

    nref = len(integrated_data["miller_index"])

    miller_index = integrated_data["miller_index"]

    if "intensity.sum.value" in integrated_data:
        I = integrated_data["intensity.sum.value"]
        V = integrated_data["intensity.sum.variance"]
        assert V.all_gt(0)
        sigI = flex.sqrt(V)
    else:
        I = integrated_data["intensity.prf.value"]
        V = integrated_data["intensity.prf.variance"]
        assert V.all_gt(0)
        sigI = flex.sqrt(V)

    # figure out scaling to make sure the data fit into format 2F8.2, i.e. Imax < 1e5

    Imax = flex.max(I)

    if params.debug:
        logger.info("Maximum intensity in file: %8.2f", Imax)

    if Imax > 99999.0:
        scale = 99999.0 / Imax
        I = I * scale
        sigI = sigI * scale

    phi_start, phi_range = experiment.scan.get_image_oscillation(
        image_range[0])

    if params.sadabs.predict:
        logger.info("Using scan static predicted spot locations")
        from dials.algorithms.spot_prediction import ScanStaticReflectionPredictor

        predictor = ScanStaticReflectionPredictor(experiment)
        UB = experiment.crystal.get_A()
        predictor.for_reflection_table(integrated_data, UB)

    if not experiment.crystal.num_scan_points:
        logger.info("No scan varying model: use static")
        static = True
    else:
        static = False

    with open(params.sadabs.hklout, "w") as fout:

        for j in range(nref):

            h, k, l = miller_index[j]

            if params.sadabs.predict:
                x_mm, y_mm, z_rad = integrated_data["xyzcal.mm"][j]
            else:
                x_mm, y_mm, z_rad = integrated_data["xyzobs.mm.value"][j]

            z0 = integrated_data["xyzcal.px"][j][2]
            istol = int(round(10000 * unit_cell.stol((h, k, l))))

            if params.sadabs.predict or static:
                # work from a scan static model & assume perfect goniometer
                # FIXME maybe should work back in the option to predict spot positions
                UB = matrix.sqr(experiment.crystal.get_A())
                phi = phi_start + z0 * phi_range
                R = axis.axis_and_angle_as_r3_rotation_matrix(phi, deg=True)
                RUB = S * R * F * UB
            else:
                # properly compute RUB for every reflection
                UB = matrix.sqr(
                    experiment.crystal.get_A_at_scan_point(int(round(z0))))
                phi = phi_start + z0 * phi_range
                R = axis.axis_and_angle_as_r3_rotation_matrix(phi, deg=True)
                RUB = S * R * F * UB

            x = RUB * (h, k, l)
            s = (s0 + x).normalize()

            # can also compute s based on centre of mass of spot
            # s = (origin + x_mm * fast_axis + y_mm * slow_axis).normalize()

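            # ix/iy/iz and dx/dy/dz below are the direction cosines of the
            # incident and diffracted beam directions with respect to the
            # (rotated) reciprocal cell axes, as required by the SADABS
            # direction-cosine columns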
            astar = (RUB * (1, 0, 0)).normalize()
            bstar = (RUB * (0, 1, 0)).normalize()
            cstar = (RUB * (0, 0, 1)).normalize()

            ix = beam.dot(astar)
            iy = beam.dot(bstar)
            iz = beam.dot(cstar)

            dx = s.dot(astar)
            dy = s.dot(bstar)
            dz = s.dot(cstar)

            x = x_mm * scl_x
            y = y_mm * scl_y
            z = (z_rad * 180 / math.pi - phi_start) / phi_range

            fout.write("%4d%4d%4d%8.2f%8.2f%4d%8.5f%8.5f%8.5f%8.5f%8.5f%8.5f" %
                       (h, k, l, I[j], sigI[j], params.sadabs.run, ix, dx, iy,
                        dy, iz, dz))
            fout.write("%7.2f%7.2f%8.2f%7.2f%5d\n" %
                       (x, y, z, detector2t, istol))

    logger.info("Output %d reflections to %s", nref, params.sadabs.hklout)
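
# A hedged sketch of the rescaling above: the SADABS HKL record stores I and
# sigma(I) in fixed-width F8.2 fields, so both are scaled down by a single
# factor whenever the maximum intensity would overflow. The helper name is
# hypothetical.
from dials.array_family import flex

def clamp_for_f8_2(I, sigI, limit=99999.0):
    """Scale I and sigI together so that max(I) fits an F8.2 field."""
    Imax = flex.max(I)
    if Imax > limit:
        scale = limit / Imax
        I = I * scale
        sigI = sigI * scale
    return I, sigI
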
Example #17
def export_shelx(scaled_data, experiment_list, params):
    """Export scaled data corresponding to experiment_list to
    a SHELX HKL formatted text file."""

    # Handle requesting profile intensities (default via auto) but no column
    if "profile" in params.intensity and "intensity.prf.value" not in scaled_data:
        raise Sorry(
            "Requested profile intensity data but only summed present. Use intensity=sum."
        )

    # Use the supplied best unit cell, or the one determined from the
    # experiment list, to define d in the reflection table.
    best_unit_cell = params.mtz.best_unit_cell
    if best_unit_cell is None:
        best_unit_cell = determine_best_unit_cell(experiment_list)
    else:
        logger.info("Using supplied unit cell across experiments : %s", best_unit_cell)
    scaled_data["d"] = best_unit_cell.d(scaled_data["miller_index"])

    # Clean up reflection table with mtz defaults (as in export_xds_ascii)
    scaled_data = filter_reflection_table(
        scaled_data,
        intensity_choice=params.intensity,
        partiality_threshold=params.mtz.partiality_threshold,
        combine_partials=params.mtz.combine_partials,
        min_isigi=params.mtz.min_isigi,
        filter_ice_rings=params.mtz.filter_ice_rings,
        d_min=params.mtz.d_min,
    )

    # Check that all experiments have the same space group
    if len({x.crystal.get_space_group().make_tidy() for x in experiment_list}) != 1:
        raise ValueError("Experiments do not have a unique space group")

    # Create a miller set with the space group of the first crystal in the
    # experiment list and the best unit cell
    miller_set = miller.set(
        crystal_symmetry=crystal.symmetry(
            unit_cell=best_unit_cell,
            space_group=experiment_list[0].crystal.get_space_group(),
        ),
        indices=scaled_data["miller_index"],
        anomalous_flag=False,
    )

    intensity_choice = (
        params.intensity[0] if params.intensity[0] != "profile" else "prf"
    )
    intensities, variances = (
        scaled_data["intensity." + intensity_choice + ".value"],
        scaled_data["intensity." + intensity_choice + ".variance"],
    )
    assert variances.all_gt(0)
    i_obs = miller.array(miller_set, data=intensities)
    i_obs.set_observation_type_xray_intensity()
    i_obs.set_sigmas(flex.sqrt(variances))

    # If requested, scale up the data to maximise use of the scale_range
    if params.shelx.scale:
        max_val = max(
            i_obs.data().min_max_mean().max, i_obs.sigmas().min_max_mean().max
        )
        min_val = min(
            i_obs.data().min_max_mean().min, i_obs.sigmas().min_max_mean().min
        )
        min_scale = params.shelx.scale_range[0] / min_val
        max_scale = params.shelx.scale_range[1] / max_val
        scale = min(min_scale, max_scale)
        i_obs = i_obs.apply_scaling(factor=scale)

    # write the SHELX HKL file
    hkl_file = params.shelx.hklout
    with open(hkl_file, "w") as hkl_file_object:
        i_obs.export_as_shelx_hklf(
            file_object=hkl_file_object,
            scale_range=params.shelx.scale_range,
            normalise_if_format_overflow=True,
        )
    logger.info(f"Written {i_obs.size()} reflections to {hkl_file}")

    # and a stub of an .ins file with information from the .expt file
    _write_ins(
        experiment_list,
        best_unit_cell=params.mtz.best_unit_cell,
        ins_file=params.shelx.ins,
    )
    logger.info(f"Written {params.shelx.ins}")
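
# A small sketch of the scale-factor choice in export_shelx above: one factor
# is picked so that both the extreme minimum value and the extreme maximum
# value land inside the requested scale_range. The helper name and the
# example range are illustrative, not DIALS defaults.
def shelx_scale_factor(data_min, data_max, scale_range):
    """Return one factor mapping [data_min, data_max] into scale_range."""
    min_scale = scale_range[0] / data_min
    max_scale = scale_range[1] / data_max
    return min(min_scale, max_scale)

# e.g. with data in [-250.0, 125000.0] and an illustrative range of
# (-9999.0, 99999.0), the factor is bounded by the large positive values:
assert shelx_scale_factor(-250.0, 125000.0, (-9999.0, 99999.0)) < 1.0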