Code example #1
File: xray.py  Project: tjmurdoch/refnx
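The function below is excerpted from the middle of the module, so its imports are not shown. Here is a minimal sketch of the module-level names it relies on, assuming the usual refnx package layout (the exact import paths and the constant's value are assumptions, not taken from the excerpt):

import numpy as np

from refnx.dataset import ReflectDataset      # used by the variant in example #2
from refnx.util import ErrorProp as EP        # EP.EPsub: subtraction with error propagation
from refnx.util import general                # general.q2, general.beamfrac

# Gaussian width (standard deviation, mm) of the incident beam; the value here
# is only a placeholder -- the real module defines its own constant.
XRR_BEAMWIDTH_SD = 0.02

# parse_xrdml_file(f) is defined elsewhere in the same module; it returns a dict
# with 'intensities', 'count_time', 'omega', 'twotheta' and 'wavelength'.
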
def reduce_xrdml(f, bkg=None, scale=1, sample_length=None):
    """
    Reduces a Panalytical XRDML file

    Parameters
    ----------
    f: file-like object or string
        The specular reflectivity (XRDML) file of interest
    bkg: list
        A list of file-like objects or strings that contain background
        measurements. The background is assumed to have the same number of
        points as the specular reflectivity curve.  The backgrounds are
        averaged and subtracted from the specular reflectivity
    scale: float
        The direct beam intensity (cps)
    sample_length: None or float
        If None then no footprint correction is done. Otherwise the transverse
        footprint of the sample (mm).

    Returns
    -------
    specular_q, specular_r, specular_dr: np.ndarray
        The specular reflectivity as a function of momentum transfer, Q.
    """

    spec = parse_xrdml_file(f)

    # convert raw counts to a count rate; the Poisson counting uncertainty on
    # the rate is sqrt(counts) / count_time
    reflectivity = spec['intensities'] / spec['count_time']
    reflectivity_s = np.sqrt(spec['intensities']) / spec['count_time']

    # do the background subtraction
    if bkg is not None:
        bkgds = [parse_xrdml_file(fi) for fi in bkg]

        bkgd_refs = np.r_[[bkgd['intensities'] for bkgd in bkgds]]
        bkgd_refs_s = np.r_[[np.sqrt(bkgd['intensities']) / bkgd['count_time']
                             for bkgd in bkgds]]
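        # Combine the background runs with an inverse-variance weighted mean:
        # each point is weighted by 1/sigma**2, and the uncertainty of the
        # weighted mean is sqrt(1 / sum(weights)). The averaged background is
        # then subtracted from the specular signal with error propagation.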
        bkgd_refs_var = bkgd_refs_s ** 2
        weights = 1. / bkgd_refs_var
        numerator = np.sum(bkgd_refs * weights, axis=0)
        denominator = np.sum(weights, axis=0)

        total_bkgd = numerator / denominator
        total_bkgd_s = np.sqrt(1 / denominator)

        reflectivity, reflectivity_s = EP.EPsub(reflectivity,
                                                reflectivity_s,
                                                total_bkgd,
                                                total_bkgd_s)

    # work out the Q values
    qx, qy, qz = general.q2(spec['omega'],
                            spec['twotheta'],
                            np.zeros_like(spec['omega']),
                            spec['wavelength'])

    # do a footprint correction
    if sample_length is not None:
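        # XRR_BEAMWIDTH_SD is the Gaussian beam width as a standard deviation;
        # the factor of ~2.35 converts it to a FWHM for the footprint calculation.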
        footprint_correction = general.beamfrac(np.array([XRR_BEAMWIDTH_SD]) *
                                                2.35,
                                                np.array([sample_length]),
                                                spec['omega'])
        reflectivity /= footprint_correction
        reflectivity_s /= footprint_correction

    # divide by the direct beam intensity
    # assumes that the direct beam intensity is enormous, so the counting
    # uncertainties in the scale factor are negligible.
    reflectivity /= scale
    reflectivity_s /= scale

    return qz, reflectivity, reflectivity_s
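
A minimal usage sketch for this variant. The file names, direct beam intensity and sample length below are made up for illustration; the function returns plain arrays (Q, R, dR) that can be plotted or fitted directly:

q, r, dr = reduce_xrdml(
    "sample.xrdml",                              # hypothetical specular scan
    bkg=["bkg_plus.xrdml", "bkg_minus.xrdml"],   # hypothetical offset background scans
    scale=2.0e6,                                 # direct beam intensity (cps)
    sample_length=10.0,                          # transverse sample size (mm)
)

import matplotlib.pyplot as plt

plt.errorbar(q, r, yerr=dr, fmt=".")
plt.yscale("log")
plt.xlabel("Q (1/Angstrom)")
plt.ylabel("Reflectivity")
plt.show()
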
Code example #2
def reduce_xrdml(f, bkg=None, scale=None, sample_length=None):
    """
    Reduces a Panalytical XRDML file

    Parameters
    ----------
    f: file-like object or string
        The specular reflectivity (XRDML) file of interest
    bkg: list
        A list of file-like objects or strings that contain background
        measurements. The background is assumed to have the same number of
        points as the specular reflectivity curve.  The backgrounds are
        averaged and subtracted from the specular reflectivity
    scale: float, None
        The direct beam intensity (cps). If `scale is None` then the dataset
        is scaled by the point with maximum intensity below Q = 0.0318 (Q_crit
        for Si at 8.048 keV).
    sample_length: None or float
        If None then no footprint correction is done. Otherwise the transverse
        footprint of the sample (mm).

    Returns
    -------
    dataset: refnx.dataset.ReflectDataset
        The specular reflectivity as a function of momentum transfer, Q.
    """

    spec = parse_xrdml_file(f)

    reflectivity = spec["intensities"] / spec["count_time"]
    reflectivity_s = np.sqrt(spec["intensities"]) / spec["count_time"]

    # do the background subtraction
    if bkg is not None:
        bkgds = [parse_xrdml_file(fi) for fi in bkg]

        bkgd_refs = np.r_[[bkgd["intensities"] for bkgd in bkgds]]
        bkgd_refs_s = np.r_[[
            np.sqrt(bkgd["intensities"]) / bkgd["count_time"] for bkgd in bkgds
        ]]
        bkgd_refs_var = bkgd_refs_s**2
        weights = 1.0 / bkgd_refs_var
        numerator = np.sum(bkgd_refs * weights, axis=0)
        denominator = np.sum(weights, axis=0)

        total_bkgd = numerator / denominator
        total_bkgd_s = np.sqrt(1 / denominator)

        reflectivity, reflectivity_s = EP.EPsub(reflectivity, reflectivity_s,
                                                total_bkgd, total_bkgd_s)

    # work out the Q values
    qx, qy, qz = general.q2(
        spec["omega"],
        spec["twotheta"],
        np.zeros_like(spec["omega"]),
        spec["wavelength"],
    )

    # do a footprint correction
    if sample_length is not None:
        footprint_correction = general.beamfrac(
            np.array([XRR_BEAMWIDTH_SD]) * 2.35,
            np.array([sample_length]),
            spec["omega"],
        )
        reflectivity /= footprint_correction
        reflectivity_s /= footprint_correction

    # divide by the direct beam intensity
    # assumes that the direct beam intensity is enormous, so the counting
    # uncertainties in the scale factor are negligible.
    if scale is None:
        # no scale factor was specified, so normalise by the highest intensity
        # point below Qc for silicon at 8.048 keV
        below_qc = qz < 0.0318
        if np.any(below_qc):
            scale = np.max(reflectivity[below_qc])
        else:
            # no points below the critical edge, leave the data unscaled
            scale = 1.0

    reflectivity /= scale
    reflectivity_s /= scale

    d = ReflectDataset(data=(qz, reflectivity, reflectivity_s))

    return d
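
This variant wraps the result in a refnx ReflectDataset instead of returning bare arrays, and it can auto-normalise when no direct beam intensity is given. A brief usage sketch; the file name is made up, and the x / y / y_err attribute access assumes refnx's Data1D conventions:

# scale=None triggers normalisation by the most intense point below the
# silicon critical edge (Q < 0.0318)
dataset = reduce_xrdml("wafer.xrdml", scale=None, sample_length=10.0)

q, r, dr = dataset.x, dataset.y, dataset.y_err
print(len(dataset), "points reduced")
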
Code example #3
def reduce_xrdml(f, bkg=None, scale=1, sample_length=None):
    """
    Reduces a Panalytical XRDML file

    Parameters
    ----------
    f: file-like object or string
        The specular reflectivity (XRDML) file of interest
    bkg: list
        A list of file-like objects or strings that contain background
        measurements. The background is assumed to have the same number of
        points as the specular reflectivity curve.  The backgrounds are
        averaged and subtracted from the specular reflectivity
    scale: float
        The direct beam intensity (cps)
    sample_length: None or float
        If None then no footprint correction is done. Otherwise the transverse
        footprint of the sample (mm).

    Returns
    -------
    specular_q, specular_r, specular_dr: np.ndarray
        The specular reflectivity as a function of momentum transfer, Q.
    """

    spec = parse_xrdml_file(f)

    reflectivity = spec['intensities'] / spec['count_time']
    reflectivity_s = np.sqrt(spec['intensities']) / spec['count_time']

    # do the background subtraction
    if bkg is not None:
        bkgds = [parse_xrdml_file(fi) for fi in bkg]

        bkgd_refs = np.r_[[bkgd['intensities'] for bkgd in bkgds]]
        bkgd_refs_s = np.r_[[
            np.sqrt(bkgd['intensities']) / bkgd['count_time'] for bkgd in bkgds
        ]]
        bkgd_refs_var = bkgd_refs_s**2
        weights = 1. / bkgd_refs_var
        numerator = np.sum(bkgd_refs * weights, axis=0)
        denominator = np.sum(weights, axis=0)

        total_bkgd = numerator / denominator
        total_bkgd_s = np.sqrt(1 / denominator)

        reflectivity, reflectivity_s = EP.EPsub(reflectivity, reflectivity_s,
                                                total_bkgd, total_bkgd_s)

    # work out the Q values
    qx, qy, qz = general.q2(spec['omega'], spec['twotheta'],
                            np.zeros_like(spec['omega']), spec['wavelength'])

    # do a footprint correction
    if sample_length is not None:
        footprint_correction = general.beamfrac(
            np.array([XRR_BEAMWIDTH_SD]) * 2.35, np.array([sample_length]),
            spec['omega'])
        reflectivity /= footprint_correction
        reflectivity_s /= footprint_correction

    # divide by the direct beam intensity
    # assumes that the direct beam intensity is enormous, so the counting
    # uncertainties in the scale factor are negligible.
    reflectivity /= scale
    reflectivity_s /= scale

    return qz, reflectivity, reflectivity_s
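
All three variants average the background runs with inverse-variance weighting before subtracting them. A self-contained sketch of that step on toy numbers, independent of the refnx helpers, showing that the combined uncertainty is smaller than any individual run's:

import numpy as np

# two toy background runs (counts per second) and their uncertainties
bkg = np.array([[10.0, 12.0, 9.0],
                [11.0, 10.0, 8.0]])
bkg_s = np.array([[1.0, 1.1, 0.9],
                  [2.0, 1.0, 0.8]])

weights = 1.0 / bkg_s**2                        # inverse-variance weights
avg = np.sum(bkg * weights, axis=0) / np.sum(weights, axis=0)
avg_s = np.sqrt(1.0 / np.sum(weights, axis=0))  # uncertainty of the weighted mean

print(avg)    # point-by-point weighted average background
print(avg_s)  # smaller than the smallest individual uncertainty at each point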