Example #1
File: stitch.py Project: scipp/ess
def _stitch_event_data(
        item: sc.DataArray, frames: sc.Dataset, dim: str, new_dim: str,
        bins: Union[int, sc.Variable]) -> Union[sc.DataArray, dict]:

    edges = sc.flatten(
        sc.transpose(
            sc.concat([frames["time_min"].data, frames["time_max"].data], 'dummy'),
            dims=['frame', 'dummy']),
        to=dim)

    binned = sc.bin(item, edges=[edges])

    for i in range(frames.sizes["frame"]):
        # TODO: temporary fix, working on .value because the read-only flag is set
        binned[dim, i * 2].value.coords[dim] -= \
            frames["time_correction"].data["frame", i]

    erase = None
    if new_dim != dim:
        binned.bins.coords[new_dim] = binned.bins.coords[dim]
        del binned.bins.coords[dim]
        erase = [dim]

    binned.masks['frame_gaps'] = (
        sc.arange(dim, 2 * frames.sizes["frame"] - 1) % 2).astype(bool)

    new_edges = sc.concat(
        [(frames["time_min"]["frame", 0] - frames["time_correction"]["frame", 0]).data,
         (frames["time_max"]["frame", -1] - frames["time_correction"]["frame", -1]).data],
        new_dim)
    return sc.bin(binned, edges=[new_edges], erase=erase)
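
The concat/transpose/flatten combination above interleaves the per-frame minimum and maximum times into a single edge array. A minimal sketch of that trick, with made-up frame times:

import scipp as sc

# Hypothetical frame boundaries for three frames.
tmin = sc.array(dims=['frame'], values=[0.0, 10.0, 20.0], unit='us')
tmax = sc.array(dims=['frame'], values=[5.0, 15.0, 25.0], unit='us')

# Concatenate along a helper dim, transpose so 'frame' is outermost, flatten:
edges = sc.flatten(
    sc.transpose(sc.concat([tmin, tmax], 'dummy'), dims=['frame', 'dummy']),
    to='time')
# edges.values -> [0, 5, 10, 15, 20, 25]: alternating frame starts and ends,
# so every odd-indexed bin is a gap between frames (hence the 'frame_gaps'
# mask over the odd bins in _stitch_event_data).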
Example #2
File: operations.py Project: scipp/ess
def _shift(var, dim, forward, out_of_bounds):
    fill = var[dim, 0:1].copy()
    fill.values = np.full_like(fill.values, out_of_bounds)
    if forward:
        return sc.concat([fill, var[dim, :-1]], dim)
    else:
        return sc.concat([var[dim, 1:], fill], dim)
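
For illustration, a quick check of _shift on a small variable (values assumed here):

import numpy as np
import scipp as sc

var = sc.array(dims=['x'], values=[1.0, 2.0, 3.0, 4.0])
# Forward shift: prepend the fill value and drop the last element.
print(_shift(var, 'x', forward=True, out_of_bounds=np.nan).values)   # [nan 1. 2. 3.]
# Backward shift: drop the first element and append the fill value.
print(_shift(var, 'x', forward=False, out_of_bounds=np.nan).values)  # [2. 3. 4. nan]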
Example #3
def to_bin_edges(x: sc.Variable, dim: str) -> sc.Variable:
    """
    Convert array centers to edges
    """
    idim = x.dims.index(dim)
    if x.shape[idim] < 2:
        one = 1.0 * x.unit
        return sc.concat([x[dim, 0:1] - one, x[dim, 0:1] + one], dim)
    else:
        center = to_bin_centers(x, dim)
        # Note: use a range of 0:1 to keep dimension `dim` in the slice, to avoid
        # switching the dimension order in the concatenate step.
        left = center[dim, 0:1] - (x[dim, 1] - x[dim, 0])
        right = center[dim, -1] + (x[dim, -1] - x[dim, -2])
        return sc.concat([left, center, right], dim)
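
to_bin_centers is not shown in this example; a plausible midpoint implementation and a round trip, for illustration only:

import scipp as sc

def to_bin_centers(x: sc.Variable, dim: str) -> sc.Variable:
    # Assumed counterpart of to_bin_edges: midpoints of neighbouring values.
    return 0.5 * (x[dim, 1:] + x[dim, :-1])

x = sc.array(dims=['x'], values=[0.0, 1.0, 2.0, 4.0], unit='m')
edges = to_bin_edges(x, 'x')
# Interior edges are the midpoints; the outer edges extrapolate by the first
# and last spacings of x: [-0.5, 0.5, 1.5, 3.0, 5.0] m
print(edges.values)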
Example #4
def make_unphysical_tof(t0, da):
    # 0 < t < t0, t < 0, t > t0
    tof = sc.concat([t0 - t0 / 2, sc.full_like(t0, -2), 2 * t0], 'tof')
    is_unphysical = sc.array(dims=['energy_transfer'],
                             values=[True, True, False]).broadcast(
                                 ['energy_transfer', 'spectrum'],
                                 [3, da.sizes['spectrum']])
    return tof, is_unphysical
Example #5
def test_simple_case_any_naming():
    ds = _make_simple_dataset(u=2, v=10, w=10)
    grouped = groupby2D(ds, nx_target=5, ny_target=5, x='w', y='v', z='u')
    assert grouped['a'].shape == [2, 5, 5]
    projection = sc.array(dims=['v', 'w'], values=np.ones((5, 5))) * 4
    expected_data = sc.concat([projection, projection], dim='u')
    assert sc.all(
        sc.isclose(grouped['a'].data, expected_data,
                   atol=1e-14 * sc.units.one)).value
Example #6
File: tools_test.py Project: scipp/ess
def test_linlogspace_log_linear():
    q_loglin = tools.linlogspace(dim='qz',
                                 edges=[0.008, 0.03, 0.08],
                                 unit='1/angstrom',
                                 scale=['log', 'linear'],
                                 num=[16, 20])
    exp_log = sc.geomspace(dim='qz', start=0.008, stop=0.03, num=16, unit='1/angstrom')
    exp_lin = sc.linspace(dim='qz', start=0.03, stop=0.08, num=21, unit='1/angstrom')
    expected = sc.concat([exp_log, exp_lin['qz', 1:]], 'qz')
    assert sc.allclose(q_loglin, expected)
Example #7
def _interpolate_transform(transform, xnew):
    # scipy can't interpolate with a single value
    if transform.sizes["time"] == 1:
        transform = sc.concat([transform, transform], dim="time")

    transform = sc.interpolate.interp1d(transform,
                                        "time",
                                        kind="previous",
                                        fill_value="extrapolate")(xnew=xnew)

    return transform
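
A minimal sketch of the "previous" interpolation used here, on a made-up scalar time series (the real code applies it to time-dependent transformations):

import scipp as sc

# Hypothetical step-wise log: the value changes at t=0 and t=10.
ts = sc.DataArray(
    data=sc.array(dims=['time'], values=[1.0, 2.0]),
    coords={'time': sc.array(dims=['time'], values=[0.0, 10.0], unit='s')})
xnew = sc.array(dims=['time'], values=[0.0, 5.0, 10.0, 15.0], unit='s')
# kind="previous" holds each value until the next sample; fill_value="extrapolate"
# keeps the last value beyond the recorded range.
f = sc.interpolate.interp1d(ts, 'time', kind='previous', fill_value='extrapolate')
print(f(xnew=xnew).values)  # [1. 1. 2. 2.]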
Example #8
def _tof_correction(data: sc.DataArray, dim: str = 'tof') -> sc.DataArray:
    """
    A correction for the presence of the chopper with respect to the "true" ToF.
    Also fold the two pulses.
    TODO: generalise mechanism to fold any number of pulses.
    """
    tau = sc.to_unit(
        1 / (2 * data.coords['source_chopper'].value['frequency'].data),
        data.coords[dim].unit)
    chopper_phase = data.coords['source_chopper'].value['phase'].data
    tof_offset = tau * chopper_phase / (180.0 * sc.units.deg)
    # Make 2 bins, one for each pulse
    edges = sc.concat([-tof_offset, tau - tof_offset, 2 * tau - tof_offset],
                      dim)
    data = sc.bin(data, edges=[sc.to_unit(edges, data.coords[dim].unit)])
    # Make one offset for each bin
    offset = sc.concat([tof_offset, tof_offset - tau], dim)
    # Apply the offset on both bins
    data.bins.coords[dim] += offset
    # Rebin to exclude second (empty) pulse range
    return sc.bin(data, edges=[sc.concat([0. * sc.units.us, tau], dim)])
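
For intuition, the offset arithmetic with assumed chopper settings (numbers are illustrative only):

import scipp as sc

frequency = sc.scalar(7.0, unit='Hz')  # assumed chopper frequency
phase = sc.scalar(90.0, unit='deg')    # assumed chopper phase

tau = sc.to_unit(1 / (2 * frequency), 'us')        # half a chopper period
tof_offset = tau * phase / (180.0 * sc.units.deg)  # fraction of tau set by the phase
print(tau.value, tof_offset.value)  # ~71428.6 us and ~35714.3 us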
Example #9
def linlogspace(dim: str,
                edges: Union[list, np.ndarray],
                scale: Union[list, str],
                num: Union[list, int],
                unit: str = None) -> sc.Variable:
    """
    Generate a 1d array of bin edges with a mixture of linear and/or logarithmic
    spacings.

    Examples:

    - Create linearly spaced edges (equivalent to `sc.linspace`):
        linlogspace(dim='x', edges=[0.008, 0.08], scale='linear', num=50, unit='m')
    - Create logarithmically spaced edges (equivalent to `sc.geomspace`):
        linlogspace(dim='x', edges=[0.008, 0.08], scale='log', num=50, unit='m')
    - Create edges with a linear and a logarithmic part:
        linlogspace(dim='x', edges=[1, 3, 8], scale=['linear', 'log'], num=[16, 20])


    :param dim: The dimension of the output Variable.
    :param edges: The edges for the different parts of the mesh.
    :param scale: A string or list of strings specifying the scaling for the different
        parts of the mesh. Possible values for the scaling are `"linear"` and `"log"`.
        If a list is supplied, the length of the list must be one less than the length
        of the `edges` parameter.
    :param num: An integer or a list of integers specifying the number of points to use
        in each part of the mesh. If a list is supplied, the length of the list must be
        one less than the length of the `edges` parameter.
    :param unit: The unit of the output Variable.
    """
    if not isinstance(scale, list):
        scale = [scale]
    if not isinstance(num, list):
        num = [num]
    if len(scale) != len(edges) - 1 or len(num) != len(edges) - 1:
        raise ValueError(
            "Sizes do not match. The lengths of scale and num should each be "
            "one less than the length of edges.")

    funcs = {"linear": sc.linspace, "log": sc.geomspace}
    grids = []
    for i in range(len(edges) - 1):
        # Skip the leading edge in the piece when concatenating
        start = int(i > 0)
        mesh = funcs[scale[i]](dim=dim,
                               start=edges[i],
                               stop=edges[i + 1],
                               num=num[i] + start,
                               unit=unit)
        grids.append(mesh[dim, start:])

    return sc.concat(grids, dim)
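
Following the docstring, a mixed grid with a linear piece from 1 to 3 and a logarithmic piece from 3 to 8 (unit added here for concreteness):

import scipp as sc

x = linlogspace(dim='x', edges=[1, 3, 8], scale=['linear', 'log'],
                num=[16, 20], unit='m')
print(x.sizes['x'])  # 36: 16 points for the linear piece + 20 for the log piece
print(x.values[0], x.values[15], x.values[-1])  # 1.0, 3.0, 8.0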
Example #10
def get_full_transformation_matrix(group: Group,
                                   nexus: LoadFromNexus) -> sc.DataArray:
    """
    Get the 4x4 transformation matrix for a component, resulting
    from the full chain of transformations linked by "depends_on"
    attributes

    :param group: The HDF5 group of the component, containing depends_on
    :param nexus: wrap data access to hdf file or objects from json
    :return: 4x4 passive transformation matrix as a data array
    """
    transformations = []
    try:
        depends_on = nexus.load_scalar_string(group, "depends_on")
    except MissingDataset:
        depends_on = '.'
    _get_transformations(depends_on, transformations, group,
                         nexus.get_name(group), nexus)

    total_transform = sc.spatial.affine_transform(value=np.identity(4),
                                                  unit=sc.units.m)

    for transform in transformations:
        if isinstance(total_transform, sc.DataArray) and isinstance(
                transform, sc.DataArray):
            xnew = sc.datetimes(
                values=np.unique(
                    sc.concat([
                        total_transform.coords["time"].to(unit=sc.units.ns, copy=True),
                        transform.coords["time"].to(unit=sc.units.ns, copy=True),
                    ], dim="time").values),
                dims=["time"],
                unit=sc.units.ns)
            total_transform = _interpolate_transform(
                transform, xnew) * _interpolate_transform(total_transform, xnew)
        else:
            total_transform = transform * total_transform

    return total_transform
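
The composition order matters: `transform * total_transform` applies the transforms accumulated so far first, then the next one in the depends_on chain. A small sketch with assumed static translations (the helper below is for this sketch only):

import numpy as np
import scipp as sc

def translation(x, y, z):
    # Hypothetical helper: a 4x4 passive translation matrix.
    m = np.identity(4)
    m[:3, 3] = [x, y, z]
    return sc.spatial.affine_transform(value=m, unit=sc.units.m)

a = translation(1.0, 0.0, 0.0)
b = translation(0.0, 2.0, 0.0)
# b * a applies a first, then b: an overall translation by [1, 2, 0].
print(b * a)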
Example #11
def reduce_to_q(data, *, q_bins, reducer, wavelength_bands=None):
    """
    Example:
    >>> reduced = reduce_to_q(data, q_bins=q_bins, reducer=simple_reducer('spectrum'))  # noqa: E501
    """
    # TODO Backup of the coord is necessary until `convert` can keep original
    wavelength = data.coords['wavelength']
    data = scn.convert(data, 'wavelength', 'Q', scatter=True)
    if wavelength_bands is None:
        data = sc.histogram(data, q_bins)
        return reducer(data)
    data.coords['wavelength'] = wavelength
    bands = None
    for i in range(wavelength_bands.sizes['wavelength'] - 1):
        low = wavelength_bands['wavelength', i]
        high = wavelength_bands['wavelength', i + 1]
        band = sc.histogram(data['wavelength', low:high], q_bins)
        band = reducer(band)
        bands = band if bands is None else sc.concat([bands, band], 'wavelength')
    bands.coords['wavelength'] = wavelength_bands
    return bands
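
The reducer argument is any callable taking a histogrammed DataArray; simple_reducer is not shown here, but a minimal sum-over-dimension version (an assumption, not necessarily the actual ess implementation) could look like:

import scipp as sc

def simple_reducer(dim: str):
    # Hypothetical reducer: collapse the histogram by summing over `dim`.
    return lambda data: sc.sum(data, dim)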
Example #12
def load_detector_data(event_data_groups: List[Group],
                       detector_groups: List[Group], nexus: LoadFromNexus,
                       quiet: bool,
                       bin_by_pixel: bool) -> Optional[sc.DataArray]:
    detectors = _load_data_from_each_nx_detector(detector_groups, nexus)
    detectors = _load_data_from_each_nx_event_data(detectors,
                                                   event_data_groups, nexus,
                                                   quiet)

    if not detectors:
        # If there were no data to load we are done
        return

    def get_detector_id(data: DetectorData):
        # Assume different detector banks do not have
        # intersecting ranges of detector ids
        if data.detector_ids is None:
            return 0
        return data.detector_ids.values[0]

    detectors.sort(key=get_detector_id)

    _create_empty_event_data(detectors)

    pixel_positions_loaded = all(
        [data.pixel_positions is not None for data in detectors])

    def _bin_events(data: DetectorData):
        if not bin_by_pixel:
            # If loading "raw" data, leave binned by pulse.
            return data.event_data
        if data.detector_ids is None:
            # If detector ids were not found in an associated detector group
            # we will just have to bin according to whatever ids we have
            # events for (pixels with no recorded events will not have a bin)
            event_id = data.event_data.bins.constituents['data'].coords[
                _detector_dimension]
            data.detector_ids = sc.array(dims=[_detector_dimension],
                                         values=np.unique(event_id.values))

        # Events in the NeXus file are effectively binned by pulse
        # (because they are recorded chronologically)
        # but for reduction it is more useful to bin by detector id
        # Broadcast pulse times to events
        data.event_data.bins.coords['pulse_time'] = sc.bins_like(
            data.event_data, fill_value=data.event_data.coords['pulse_time'])
        # TODO Look into using `erase=[_pulse_dimension]` instead of binning
        # underlying buffer. Must prove that performance can be unaffected.
        da = sc.bin(data.event_data.bins.constituents['data'],
                    groups=[data.detector_ids])
        # Add a single time-of-flight bin
        da = sc.DataArray(data=sc.broadcast(da.data,
                                            dims=da.dims + [_time_of_flight],
                                            shape=da.shape + [1]),
                          coords={_detector_dimension: data.detector_ids})
        if pixel_positions_loaded:
            # TODO: the name 'position' should probably not be hard-coded but moved
            # to a variable that can be changed in a single place.
            da.coords['position'] = data.pixel_positions
        return da

    _dim = _detector_dimension if bin_by_pixel else _bank_dimension
    events = sc.concat([_bin_events(item) for item in detectors], _dim)

    if bin_by_pixel:
        _min_tof = events.bins.coords[_time_of_flight].min()
        _max_tof = events.bins.coords[_time_of_flight].max()
        # This can happen if there were no events in the file at all, since sc.min
        # returns double_max and sc.max returns double_min
        if _min_tof.value >= _max_tof.value:
            _min_tof, _max_tof = _max_tof, _min_tof
        if np.issubdtype(type(_max_tof.value), np.integer):
            if _max_tof.value != np.iinfo(type(_max_tof.value)).max:
                _max_tof += sc.ones_like(_max_tof)
        else:
            if _max_tof.value != np.finfo(type(_max_tof.value)).max:
                _max_tof.value = np.nextafter(_max_tof.value, float("inf"))
        events.coords[_time_of_flight] = sc.concat([_min_tof, _max_tof],
                                                   _time_of_flight)

    return events
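
The nudge applied to _max_tof above widens the upper edge just enough that an event at exactly the maximum time-of-flight still falls inside the final bin. A tiny illustration of the float case:

import numpy as np

t_max = 1000.0
# Smallest representable float strictly greater than t_max.
print(np.nextafter(t_max, float("inf")) > t_max)  # True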
Example #13
def convert_EventWorkspace_to_data_array(ws,
                                         load_pulse_times=True,
                                         advanced_geometry=False,
                                         load_run_logs=True,
                                         **ignored):
    dim, unit = validate_and_get_unit(ws.getAxis(0).getUnit())
    spec_dim, spec_coord = init_spec_axis(ws)
    nHist = ws.getNumberHistograms()
    _, data_unit = validate_and_get_unit(ws.YUnit(), allow_empty=True)

    n_event = ws.getNumberEvents()
    coord = sc.zeros(dims=['event'],
                     shape=[n_event],
                     unit=unit,
                     dtype=sc.DType.float64)
    weights = sc.ones(dims=['event'],
                      shape=[n_event],
                      unit=data_unit,
                      dtype=sc.DType.float32,
                      with_variances=True)
    pulse_times = sc.empty(dims=['event'],
                           shape=[n_event],
                           dtype=sc.DType.datetime64,
                           unit=sc.units.ns) if load_pulse_times else None

    begins = sc.zeros(dims=[spec_dim, dim],
                      shape=[nHist, 1],
                      dtype=sc.DType.int64)
    ends = begins.copy()
    current = 0
    for i in range(nHist):
        sp = ws.getSpectrum(i)
        size = sp.getNumberEvents()
        coord['event', current:current + size].values = sp.getTofs()
        if load_pulse_times:
            pulse_times['event', current:current + size].values = \
                sp.getPulseTimesAsNumpy()
        if _contains_weighted_events(sp):
            weights['event', current:current + size].values = sp.getWeights()
            weights['event', current:current + size].variances = \
                sp.getWeightErrors()
        begins.values[i] = current
        ends.values[i] = current + size
        current += size

    proto_events = {'data': weights, 'coords': {dim: coord}}
    if load_pulse_times:
        proto_events["coords"]["pulse_time"] = pulse_times
    events = sc.DataArray(**proto_events)

    coords_labs_data = _convert_MatrixWorkspace_info(
        ws, advanced_geometry=advanced_geometry, load_run_logs=load_run_logs)
    # For now we ignore potential finer bin edges to avoid creating too many
    # bins. Use just a single bin along dim and use extents given by workspace
    # edges.
    # TODO If there are events outside edges this might create bins with
    # events that are not within bin bounds. Consider using `bin` instead
    # of `bins`?
    edges = coords_labs_data['coords'][dim]
    # Using range slice of thickness 1 to avoid transposing 2-D coords
    coords_labs_data['coords'][dim] = sc.concat(
        [edges[dim, :1], edges[dim, -1:]], dim)

    coords_labs_data["data"] = sc.bins(begin=begins,
                                       end=ends,
                                       dim='event',
                                       data=events)
    return sc.DataArray(**coords_labs_data)