def compare(A: DataType, B: DataType):
    """Opens a comparison tool for two pieces of data after rescaling both to a common total intensity."""
    A = normalize_to_spectrum(A)
    attrs = A.attrs  # saved here because the arithmetic below strips attrs
    B = normalize_to_spectrum(B)

    # normalize total intensity
    TOTAL_INTENSITY = 1000000
    A = A / (A.sum(A.dims) / TOTAL_INTENSITY)
    B = B / (B.sum(B.dims) / TOTAL_INTENSITY)
    A.attrs.update(**attrs)

    tool = ComparisonTool(other=B)
    return tool.make_tool(A)
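
# A minimal usage sketch, assuming a PyARPES-style session; the file names
# 'scan_a.nc' and 'scan_b.nc' are hypothetical stand-ins for two scans:
import xarray as xr

scan_a = xr.open_dataarray('scan_a.nc')
scan_b = xr.open_dataarray('scan_b.nc')
tool = compare(scan_a, scan_b)  # both scans rescaled to the same total intensity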
Example #2
def estimate_prior_adjustment(data: DataType,
                              region: Union[dict, str] = None) -> float:
    r"""
    Estimates the parameters of a distribution generating the intensity
    histogram of pixels in a spectrum. In a perfectly linear, single-electron
    single-count detector, this would be a poisson distribution with
    \lambda=mean(counts) over the window. Despite this, we can estimate \lambda
    phenomenologically and verify that a Poisson distribution provides a good
    prior for the data, allowing us to perform statistical bootstrapping.

    You should use this with a spectrum that has uniform intensity, i.e. with a
    copper reference or similar.

    :param data:
    :return: returns sigma / mu, adjustment factor for the Poisson distribution
    """
    data = normalize_to_spectrum(data)

    if region is None:
        region = 'copper_prior'

    region = normalize_region(region)

    if 'cycle' in data.dims:
        data = data.sum('cycle')

    data = data.S.zero_spectrometer_edges().S.region_sel(region)
    values = data.values.ravel()
    values = values[np.where(values)]  # discard zero-count pixels
    return np.std(values) / np.mean(values)
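
# A usage sketch, assuming a uniform-intensity copper reference is on disk;
# 'cu_reference.nc' is a hypothetical file name:
import xarray as xr

cu_reference = xr.open_dataarray('cu_reference.nc')
adjustment = estimate_prior_adjustment(cu_reference)

# For a true Poisson process with mean counts lambda, sigma / mu = 1 / sqrt(lambda),
# so comparing `adjustment` against that value checks the quality of the prior.
print(f'sigma / mu = {adjustment:.4f}')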
Example #3
def determine_broadened_fermi_distribution(reference_data: DataType, fixed_temperature=True):
    """
    Determine the parameters for broadening by temperature and instrumental resolution
    for a piece of data.

    As a general rule, we first try to estimate the instrumental broadening and linewidth broadening
    according to calibrations provided for the beamline + instrument, as a starting point.

    We also calculate the thermal broadening to expect, and fit an edge location. Then we use a Gaussian
    convolved Fermi-Dirac distribution against an affine density of states near the Fermi level, with a constant
    offset background above the Fermi level as a simple but effective model when away from lineshapes.

    These parameters can be used to bootstrap a fit to actual data or used directly in ``normalize_by_fermi_dirac``.


    :param reference_data:
    :return:
    """
    params = {}

    if fixed_temperature:
        params['fd_width'] = {
            'value': reference_data.S.temp * K_BOLTZMANN_EV_KELVIN,
            'vary': False,
        }

    reference_data = normalize_to_spectrum(reference_data)

    sum_dims = list(reference_data.dims)
    sum_dims.remove('eV')

    return AffineBroadenedFD().guess_fit(reference_data.sum(sum_dims), params=params)
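
# A usage sketch, assuming a metal Fermi-edge reference scan is available;
# 'gold_edge.nc' is a hypothetical file name:
import xarray as xr

gold_edge = xr.open_dataarray('gold_edge.nc')
edge_fit = determine_broadened_fermi_distribution(gold_edge)

# The fitted edge location and widths can seed later fits or be passed along
# to ``normalize_by_fermi_dirac``.
print(edge_fit.params)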
Example #4
def summarize(data: DataType, axes=None):
    """Plots a grid of summary panels for ``data``, one per pair of its dimensions."""
    data = normalize_to_spectrum(data)

    axes_shapes_for_dims = {
        1: (1, 1),
        2: (1, 1),
        3: (2, 2),  # one extra here
        4: (3, 2),  # corresponds to 4 choose 2 axes
    }

    if axes is None:
        # subplots takes nrows and ncols separately, so unpack the shape tuple;
        # squeeze=False guarantees a 2D array of axes for ravel() below
        fig, axes = plt.subplots(*axes_shapes_for_dims.get(len(data.dims)),
                                 figsize=(8, 8), squeeze=False)

    flat_axes = axes.ravel()
    combinations = list(itertools.combinations(data.dims, 2))
    for axi, combination in zip(flat_axes, combinations):
        data.sum(combination).plot(ax=axi)
        fancy_labels(axi)

    for i in range(len(combinations), len(flat_axes)):
        flat_axes[i].set_axis_off()

    return axes
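
# A usage sketch; 'volume.nc' stands in for a hypothetical 3D scan, which maps
# onto the 2x2 grid above (three dimension pairs, fourth panel switched off):
import matplotlib.pyplot as plt
import xarray as xr

volume = xr.open_dataarray('volume.nc')
summarize(volume)
plt.show()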
Example #5
def find_kf_by_mdc(slice: DataType, offset=0, **kwargs):
    """
    Finds the Fermi momentum by fitting a Lorentzian plus an affine background to an MDC.

    ``offset`` controls the radial offset from the pocket, for studies where you
    want to sit slightly off the Fermi momentum.
    :param slice: MDC to fit; an 'eV' dimension, if present, is summed away first
    :param offset: offset added to the fitted center
    :param kwargs: extra parameters passed through to the fit
    :return: the fitted Lorentzian center plus ``offset``
    """
    if isinstance(slice, xr.Dataset):
        slice = slice.data

    assert isinstance(slice, xr.DataArray)

    if 'eV' in slice.dims:
        slice = slice.sum('eV')

    lor = LorentzianModel()
    bkg = AffineBackgroundModel(prefix='b_')

    result = (lor + bkg).guess_fit(data=slice, params=kwargs)
    return result.params['center'].value + offset
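
# A usage sketch; `fermi_surface_cut` is a hypothetical DataArray with 'eV' and
# momentum dimensions, and extra kwargs become parameter hints for the fit:
mdc_window = fermi_surface_cut.sel(eV=slice(-0.05, 0.05))
kf = find_kf_by_mdc(mdc_window, offset=0.02)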
Example #6
def normalize_total(data: DataType):
    """Rescales ``data`` so that its total intensity across all dimensions is 1,000,000 (1e6) counts."""
    data = normalize_to_spectrum(data)

    return data / (data.sum(data.dims) / 1000000)
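
# This is the same rescaling used inside ``compare`` above. A usage sketch,
# with `scan_a` standing in for any hypothetical loadable piece of data:
normalized = normalize_total(scan_a)

# The total intensity is now 1e6 counts (up to floating-point error).
print(float(normalized.sum(normalized.dims)))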
Example #7
def broadcast_model(model_cls: Union[type, TypeIterable],
                    data: DataType,
                    broadcast_dims,
                    params=None,
                    progress=True,
                    dataset=True,
                    weights=None,
                    safe=False,
                    prefixes=None,
                    window=None,
                    multithread=False):
    """
    Perform a fit across a number of dimensions. Allows composite models as well as models
    defined and compiled through strings.
    :param model_cls:
    :param data:
    :param broadcast_dims:
    :param params:
    :param progress:
    :param dataset:
    :param weights:
    :param safe:
    :param window:
    :return:
    """
    if params is None:
        params = {}

    if isinstance(broadcast_dims, str):
        broadcast_dims = [broadcast_dims]

    data = normalize_to_spectrum(data)
    cs = {}
    for dim in broadcast_dims:
        cs[dim] = data.coords[dim]

    other_axes = set(data.dims).difference(set(broadcast_dims))
    template = data.sum(list(other_axes))
    template.values = np.empty(template.shape, dtype=object)  # `np.object` is removed in modern NumPy

    residual = data.copy(deep=True)
    residual.values = np.zeros(residual.shape)

    model = compile_model(parse_model(model_cls),
                          params=params,
                          prefixes=prefixes)
    if isinstance(params, (list, tuple)):
        params = {}

    new_params = model.make_params()

    n_fits = np.prod(np.array(list(template.S.dshape.values())))

    identity = lambda x, *args, **kwargs: x
    wrap_progress = identity
    if progress:
        wrap_progress = tqdm_notebook

    _fit_func = functools.partial(_perform_fit,
                                  data=data,
                                  model=model,
                                  params=params,
                                  safe=safe,
                                  weights=weights,
                                  window=window)

    if multithread:
        # despite the parameter name, this uses a process pool, sidestepping the GIL
        with ProcessPoolExecutor() as executor:
            for fit_result, fit_residual, coords in executor.map(
                    _fit_func, template.T.iter_coords()):
                template.loc[coords] = fit_result
                residual.loc[coords] = fit_residual
    else:
        for indices, cut_coords in wrap_progress(
                template.T.enumerate_iter_coords(), desc='Fitting',
                total=n_fits):
            fit_result, fit_residual, _ = _fit_func(cut_coords)
            template.loc[cut_coords] = fit_result
            residual.loc[cut_coords] = fit_residual

    if dataset:
        return xr.Dataset(
            {
                'results': template,
                'data': data,
                'residual': residual,
                'norm_residual': residual / data,
            }, residual.coords)

    template.attrs['original_data'] = data
    return template
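
# A usage sketch: fit a Lorentzian at each energy of a 2D cut, broadcasting
# over 'eV'; `cut` is a hypothetical DataArray with 'eV' and 'phi' dimensions:
fits = broadcast_model(LorentzianModel, cut, 'eV')

# `fits.results` holds one lmfit-style result object per 'eV' coordinate, so
# for a single broadcast dimension the fitted peak positions can be collected
# with a comprehension:
centers = [fit.params['center'].value for fit in fits.results.values]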