Example #1
def _create_metadata_buffer_array(name: str, unit: str, dtype: Any, buffer_size: int):
    return sc.DataArray(sc.zeros(dims=[name],
                                 shape=(buffer_size, ),
                                 unit=unit,
                                 dtype=dtype),
                        coords={
                            "time":
                            sc.zeros(dims=[name],
                                     shape=(buffer_size, ),
                                     unit=sc.Unit("nanoseconds"),
                                     dtype=np.dtype('datetime64[ns]'))
                        })
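A minimal usage sketch for the helper above (not from the source): the source name "temperature", the unit "K" and the buffer size are illustrative, and scipp/numpy are assumed to be imported as sc/np.

# Hypothetical usage of _create_metadata_buffer_array; all argument values are illustrative.
temperature_buffer = _create_metadata_buffer_array("temperature", "K", np.float64, 1000)
print(temperature_buffer.sizes)                  # {'temperature': 1000}
print(temperature_buffer.coords["time"].dtype)   # datetime64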
Example #2
def _create_empty_events_data_array(
        tof_dtype: Any = np.int64,
        tof_unit: Union[str, sc.Unit] = "ns",
        detector_id_dtype: Any = np.int32) -> sc.DataArray:
    data = sc.DataArray(data=sc.empty(dims=[_event_dimension],
                                      shape=[0],
                                      unit='counts',
                                      with_variances=True,
                                      dtype=np.float32),
                        coords={
                            _time_of_flight:
                            sc.empty(dims=[_event_dimension],
                                     shape=[0],
                                     dtype=tof_dtype,
                                     unit=tof_unit),
                            _detector_dimension:
                            sc.empty(dims=[_event_dimension],
                                     shape=[0],
                                     dtype=detector_id_dtype),
                        })
    indices = sc.array(dims=[_pulse_dimension], values=[], dtype='int64')
    return sc.DataArray(data=sc.bins(begin=indices,
                                     end=indices,
                                     dim=_event_dimension,
                                     data=data),
                        coords={
                            'pulse_time':
                            sc.zeros(dims=[_pulse_dimension],
                                     shape=[0],
                                     dtype='datetime64',
                                     unit='ns')
                        })
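A minimal check sketch for the factory above (not from the source), assuming _pulse_dimension is the module-level pulse dimension label used in the snippet and scipp/numpy are imported as sc/np.

# Sketch only: the defaults produce a binned DataArray with zero pulses and zero events.
empty = _create_empty_events_data_array(tof_dtype=np.float64, tof_unit="us")
assert empty.sizes[_pulse_dimension] == 0   # no pulses yet
assert empty.bins is not None               # but the binned-event structure is in place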
Example #3
def __init__(self, queue: mp.Queue, event_buffer_size: int,
             slow_metadata_buffer_size: int, fast_metadata_buffer_size: int,
             chopper_buffer_size: int, interval_s: float, run_id: str):
    self._buffer_mutex = threading.Lock()
    self._interval_s = interval_s
    self._event_buffer_size = event_buffer_size
    self._slow_metadata_buffer_size = slow_metadata_buffer_size
    self._fast_metadata_buffer_size = fast_metadata_buffer_size
    self._chopper_buffer_size = chopper_buffer_size
    self._current_run_id = run_id
    tof_buffer = sc.zeros(dims=['event'],
                          shape=[event_buffer_size],
                          unit=sc.units.ns,
                          dtype=sc.DType.int32)
    id_buffer = sc.zeros(dims=['event'],
                         shape=[event_buffer_size],
                         unit=sc.units.one,
                         dtype=sc.DType.int32)
    pulse_times = sc.zeros(dims=['event'],
                           shape=[event_buffer_size],
                           unit=sc.units.ns,
                           dtype=sc.DType.int64)
    weights = sc.ones(dims=['event'],
                      shape=[event_buffer_size],
                      with_variances=True)
    self._events_buffer = sc.DataArray(weights,
                                       coords={
                                           'tof': tof_buffer,
                                           'detector_id': id_buffer,
                                           'pulse_time': pulse_times
                                       })
    self._current_event = 0
    self._cancelled = False
    self._notify_cancelled = threading.Condition()
    self._unrecognised_fb_id_count = 0
    self._periodic_emit: Optional[threading.Thread] = None
    self._emit_queue = queue
    # Access metadata buffer by
    # self._metadata_buffers[flatbuffer_id][source_name]
    self._metadata_buffers: Dict[str, Dict[str, _MetadataBuffer]] = {
        flatbuffer_id: {}
        for flatbuffer_id in metadata_ids
    }
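A hypothetical append pattern for the preallocated buffers above (a sketch, not part of the source class); new_tofs, new_ids and new_pulse_times stand in for arrays decoded from an incoming message.

def _append_events(self, new_tofs, new_ids, new_pulse_times):
    # Sketch only: copy incoming events into slices of the preallocated buffer,
    # guarded by the mutex; argument names are illustrative.
    n = len(new_tofs)
    start = self._current_event
    with self._buffer_mutex:
        self._events_buffer.coords['tof']['event', start:start + n].values = new_tofs
        self._events_buffer.coords['detector_id']['event', start:start + n].values = new_ids
        self._events_buffer.coords['pulse_time']['event', start:start + n].values = new_pulse_times
        self._current_event += n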
Example #4
def __init__(self, stream_info: StreamInfo, buffer_size: int, data_queue: mp.Queue):
    self._buffer_mutex = threading.Lock()
    self._buffer_size = buffer_size
    self._name = stream_info.source_name
    self._data_queue = data_queue
    self._buffer_filled_size = 0
    self._data_array = sc.zeros(dims=[self._name],
                                shape=(buffer_size, ),
                                unit=sc.Unit("nanoseconds"),
                                dtype=np.int64)
Example #5
def _stitch_dense_data(
        item: sc.DataArray, frames: sc.Dataset, dim: str, new_dim: str,
        bins: Union[int, sc.Variable]) -> Union[sc.DataArray, dict]:

    # Make empty data container
    if isinstance(bins, int):
        new_coord = sc.linspace(
            dim=new_dim,
            start=(frames["time_min"]["frame", 0] -
                   frames["time_correction"]["frame", 0]).value,
            stop=(frames["time_max"]["frame", -1] -
                  frames["time_correction"]["frame", -1]).value,
            num=bins + 1,
            unit=frames["time_min"].unit,
        )
    else:
        new_coord = bins

    dims = []
    shape = []
    for dim_ in item.dims:
        if dim_ != dim:
            dims.append(dim_)
            shape.append(item.sizes[dim_])
        else:
            dims.append(new_dim)
            shape.append(new_coord.sizes[new_dim] - 1)

    out = sc.DataArray(data=sc.zeros(dims=dims,
                                     shape=shape,
                                     with_variances=item.variances is not None,
                                     unit=item.unit),
                       coords={new_dim: new_coord})
    for group in ["coords", "attrs"]:
        for key in getattr(item, group):
            if key != dim:
                getattr(out, group)[key] = getattr(item, group)[key].copy()

    for i in range(frames.sizes["frame"]):
        section = item[dim, frames["time_min"].data[
            "frame", i]:frames["time_max"].data["frame",
                                                i]].rename_dims({dim: new_dim})
        section.coords[new_dim] = section.coords[dim] - frames[
            "time_correction"].data["frame", i]
        if new_dim != dim:
            del section.coords[dim]

        out += sc.rebin(section, new_dim, out.coords[new_dim])

    return out
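The accumulation in the loop relies on sc.rebin mapping each shifted frame section onto the common output axis; a standalone sketch of that step (values are illustrative, scipp assumed imported as sc):

# Sketch only: rebin a histogram with a bin-edge coordinate 'tof' onto coarser
# edges, as done for each frame section above.
da = sc.DataArray(sc.ones(dims=['tof'], shape=[4], unit='counts'),
                  coords={'tof': sc.linspace(dim='tof', start=0.0, stop=4.0, num=5, unit='us')})
target_edges = sc.linspace(dim='tof', start=0.0, stop=4.0, num=3, unit='us')
total = sc.rebin(da, 'tof', target_edges)   # two output bins, counts conserved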
Example #6
def convert_Workspace2D_to_data_array(ws,
                                      load_run_logs=True,
                                      advanced_geometry=False,
                                      **ignored):

    dim, unit = validate_and_get_unit(ws.getAxis(0).getUnit())
    spec_dim, spec_coord = init_spec_axis(ws)

    coords_labs_data = _convert_MatrixWorkspace_info(
        ws, advanced_geometry=advanced_geometry, load_run_logs=load_run_logs)
    _, data_unit = validate_and_get_unit(ws.YUnit(), allow_empty=True)
    if ws.id() == 'MaskWorkspace':
        coords_labs_data["data"] = sc.Variable(dims=[spec_dim],
                                               unit=data_unit,
                                               values=ws.extractY().flatten(),
                                               dtype=sc.DType.bool)
    else:
        stddev2 = ws.extractE()
        np.multiply(stddev2, stddev2, out=stddev2)  # much faster than np.power
        coords_labs_data["data"] = sc.Variable(dims=[spec_dim, dim],
                                               unit=data_unit,
                                               values=ws.extractY(),
                                               variances=stddev2)
    array = sc.DataArray(**coords_labs_data)

    if ws.hasAnyMaskedBins():
        bin_mask = sc.zeros(dims=array.dims,
                            shape=array.shape,
                            dtype=sc.DType.bool)
        for i in range(ws.getNumberHistograms()):
            # maskedBinsIndices throws instead of returning an empty list
            if ws.hasMaskedBins(i):
                set_bin_masks(bin_mask, dim, i, ws.maskedBinsIndices(i))
        common_mask = sc.all(bin_mask, spec_dim)
        if sc.identical(common_mask, sc.any(bin_mask, spec_dim)):
            array.masks["bin"] = common_mask
        else:
            array.masks["bin"] = bin_mask

    # Avoid creating dimensions that are not required, since this is mostly an
    # artifact of inflexible data structures and gets in the way when working
    # with scipp.
    if len(spec_coord.values) == 1:
        if 'position' in array.coords:
            array.coords['position'] = array.coords['position'][spec_dim, 0]
        array = array[spec_dim, 0].copy()
    return array
Example #7
def make_tof_binned_events():
    buffer = sc.DataArray(sc.zeros(dims=['event'], shape=[7], dtype=float),
                          coords={
                              'tof':
                              sc.array(dims=['event'],
                                       values=[
                                           1000.0, 3000.0, 2000.0, 4000.0,
                                           5000.0, 6000.0, 3000.0
                                       ],
                                       unit='us')
                          })
    return sc.bins(data=buffer,
                   dim='event',
                   begin=sc.array(dims=['spectrum'],
                                  values=[0, 4],
                                  dtype='int64'),
                   end=sc.array(dims=['spectrum'],
                                values=[4, 7],
                                dtype='int64'))
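A minimal check sketch of the structure returned above (not from the source), assuming scipp is imported as sc:

# Sketch only: the result is a binned variable with two spectra holding
# 4 and 3 events respectively.
binned = make_tof_binned_events()
assert binned.sizes['spectrum'] == 2
assert binned.bins.size().values.tolist() == [4, 3]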
Example #8
    def test_Workspace2D_common_bins_not_common_masks(self):
        import mantid.simpleapi as mantid
        eventWS = self.base_event_ws
        ws = mantid.Rebin(eventWS, 10000, PreserveEvents=False)
        ws_x = ws.readX(0)

        # mask first 3 bins in first 3 spectra, range is taken as [XMin, XMax)
        masked_ws = self._mask_bins_and_spectra(ws,
                                                xmin=ws_x[0],
                                                xmax=ws_x[3],
                                                num_spectra=3,
                                                indices='0-2')

        self.assertTrue(masked_ws.isCommonBins())

        ds = scn.mantid.convert_Workspace2D_to_data_array(masked_ws)

        mask = sc.zeros(dims=ds.dims, shape=ds.shape, dtype=sc.DType.bool)
        mask['spectrum', 0:3]['tof', 0:3] |= sc.scalar(True)
        assert sc.identical(ds.masks['bin'], mask)

        np.testing.assert_array_equal(ds.masks["spectrum"].values[0:3],
                                      [True, True, True])
Example #9
def _frames_from_slopes(data):
    detector_pos_norm = sc.norm(data.meta["position"])

    # Get the number of WFM frames
    choppers = {
        key: data.meta[key].value
        for key in ch.find_chopper_keys(data)
    }
    nframes = ch.cutout_angles_begin(choppers["chopper_wfm_1"]).sizes["frame"]

    # Now find frame boundaries
    frames = sc.Dataset()
    frames["time_min"] = sc.zeros(dims=["frame"],
                                  shape=[nframes],
                                  unit=sc.units.us)
    frames["time_max"] = sc.zeros_like(frames["time_min"])
    frames["delta_time_min"] = sc.zeros_like(frames["time_min"])
    frames["delta_time_max"] = sc.zeros_like(frames["time_min"])
    frames["wavelength_min"] = sc.zeros(dims=["frame"],
                                        shape=[nframes],
                                        unit=sc.units.angstrom)
    frames["wavelength_max"] = sc.zeros_like(frames["wavelength_min"])
    frames["delta_wavelength_min"] = sc.zeros_like(frames["wavelength_min"])
    frames["delta_wavelength_max"] = sc.zeros_like(frames["wavelength_min"])

    frames["time_correction"] = sc.zeros(dims=["frame"],
                                         shape=[nframes],
                                         unit=sc.units.us)

    near_wfm_chopper = choppers["chopper_wfm_1"]
    far_wfm_chopper = choppers["chopper_wfm_2"]

    # Distance between WFM choppers
    dz_wfm = sc.norm(far_wfm_chopper["position"].data -
                     near_wfm_chopper["position"].data)
    # Mid-point between WFM choppers
    z_wfm = 0.5 * (near_wfm_chopper["position"].data +
                   far_wfm_chopper["position"].data)
    # Distance between detector positions and wfm chopper mid-point
    zdet_minus_zwfm = sc.norm(data.meta["position"] - z_wfm)
    # Neutron mass to Planck constant ratio
    alpha = sc.to_unit(constants.m_n / constants.h, 'us/m/angstrom')

    near_t_open = ch.time_open(near_wfm_chopper)
    near_t_close = ch.time_closed(near_wfm_chopper)
    far_t_open = ch.time_open(far_wfm_chopper)

    for i in range(nframes):
        dt_lambda_max = near_t_close["frame", i] - near_t_open["frame", i]
        slope_lambda_max = dz_wfm / dt_lambda_max
        intercept_lambda_max = sc.norm(
            near_wfm_chopper["position"].data
        ) - slope_lambda_max * near_t_close["frame", i]
        t_lambda_max = (detector_pos_norm -
                        intercept_lambda_max) / slope_lambda_max

        slope_lambda_min = sc.norm(near_wfm_chopper["position"].data) / (
            near_t_close["frame", i] -
            (data.meta["source_pulse_length"] + data.meta["source_pulse_t_0"]))
        intercept_lambda_min = sc.norm(
            far_wfm_chopper["position"].data
        ) - slope_lambda_min * far_t_open["frame", i]
        t_lambda_min = (detector_pos_norm -
                        intercept_lambda_min) / slope_lambda_min

        t_lambda_min_plus_dt = (
            detector_pos_norm -
            (sc.norm(near_wfm_chopper["position"].data) -
             slope_lambda_min * near_t_close["frame", i])) / slope_lambda_min
        dt_lambda_min = t_lambda_min_plus_dt - t_lambda_min

        # Compute wavelength information
        lambda_min = (t_lambda_min + 0.5 * dt_lambda_min -
                      far_t_open["frame", i]) / (alpha * zdet_minus_zwfm)
        lambda_max = (t_lambda_max - 0.5 * dt_lambda_max -
                      far_t_open["frame", i]) / (alpha * zdet_minus_zwfm)
        dlambda_min = dz_wfm * lambda_min / zdet_minus_zwfm
        dlambda_max = dz_wfm * lambda_max / zdet_minus_zwfm

        frames["time_min"]["frame", i] = t_lambda_min
        frames["delta_time_min"]["frame", i] = dt_lambda_min
        frames["time_max"]["frame", i] = t_lambda_max
        frames["delta_time_max"]["frame", i] = dt_lambda_max
        frames["wavelength_min"]["frame", i] = lambda_min
        frames["wavelength_max"]["frame", i] = lambda_max
        frames["delta_wavelength_min"]["frame", i] = dlambda_min
        frames["delta_wavelength_max"]["frame", i] = dlambda_max
        frames["time_correction"]["frame", i] = far_t_open["frame", i]

    frames["wfm_chopper_mid_point"] = z_wfm
    return frames
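For reference, the wavelength values computed in the loop follow the usual time-of-flight relation between arrival time and wavelength, measured from the far WFM chopper opening time and the chopper mid-point (notation below is mine, not from the source):

t - t_{\mathrm{open}} = \alpha \, \lambda \, (z_{\mathrm{det}} - z_{\mathrm{wfm}}),
\qquad \alpha = \frac{m_n}{h}
\quad\Longrightarrow\quad
\lambda = \frac{t - t_{\mathrm{open}}}{\alpha \, (z_{\mathrm{det}} - z_{\mathrm{wfm}})},
\qquad
\Delta\lambda = \frac{\Delta z_{\mathrm{wfm}}}{z_{\mathrm{det}} - z_{\mathrm{wfm}}} \, \lambda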
Example #10
def convert_EventWorkspace_to_data_array(ws,
                                         load_pulse_times=True,
                                         advanced_geometry=False,
                                         load_run_logs=True,
                                         **ignored):
    dim, unit = validate_and_get_unit(ws.getAxis(0).getUnit())
    spec_dim, spec_coord = init_spec_axis(ws)
    nHist = ws.getNumberHistograms()
    _, data_unit = validate_and_get_unit(ws.YUnit(), allow_empty=True)

    n_event = ws.getNumberEvents()
    coord = sc.zeros(dims=['event'],
                     shape=[n_event],
                     unit=unit,
                     dtype=sc.DType.float64)
    weights = sc.ones(dims=['event'],
                      shape=[n_event],
                      unit=data_unit,
                      dtype=sc.DType.float32,
                      with_variances=True)
    pulse_times = sc.empty(dims=['event'],
                           shape=[n_event],
                           dtype=sc.DType.datetime64,
                           unit=sc.units.ns) if load_pulse_times else None

    begins = sc.zeros(dims=[spec_dim, dim],
                      shape=[nHist, 1],
                      dtype=sc.DType.int64)
    ends = begins.copy()
    current = 0
    for i in range(nHist):
        sp = ws.getSpectrum(i)
        size = sp.getNumberEvents()
        coord['event', current:current + size].values = sp.getTofs()
        if load_pulse_times:
            pulse_times['event', current:current +
                        size].values = sp.getPulseTimesAsNumpy()
        if _contains_weighted_events(sp):
            weights['event', current:current + size].values = sp.getWeights()
            weights['event',
                    current:current + size].variances = sp.getWeightErrors()
        begins.values[i] = current
        ends.values[i] = current + size
        current += size

    proto_events = {'data': weights, 'coords': {dim: coord}}
    if load_pulse_times:
        proto_events["coords"]["pulse_time"] = pulse_times
    events = sc.DataArray(**proto_events)

    coords_labs_data = _convert_MatrixWorkspace_info(
        ws, advanced_geometry=advanced_geometry, load_run_logs=load_run_logs)
    # For now we ignore potential finer bin edges to avoid creating too many
    # bins. Use just a single bin along dim and use extents given by workspace
    # edges.
    # TODO If there are events outside edges this might create bins with
    # events that are not within bin bounds. Consider using `bin` instead
    # of `bins`?
    edges = coords_labs_data['coords'][dim]
    # Using range slice of thickness 1 to avoid transposing 2-D coords
    coords_labs_data['coords'][dim] = sc.concat(
        [edges[dim, :1], edges[dim, -1:]], dim)

    coords_labs_data["data"] = sc.bins(begin=begins,
                                       end=ends,
                                       dim='event',
                                       data=events)
    return sc.DataArray(**coords_labs_data)
Example #11
def get_detector_properties(ws,
                            source_pos,
                            sample_pos,
                            spectrum_dim,
                            advanced_geometry=False):
    if not advanced_geometry:
        return (get_detector_pos(ws, spectrum_dim), None, None)
    spec_info = ws.spectrumInfo()
    det_info = ws.detectorInfo()
    comp_info = ws.componentInfo()
    nspec = len(spec_info)
    det_rot = np.zeros([nspec, 3, 3])
    det_bbox = np.zeros([nspec, 3])

    if sample_pos is not None and source_pos is not None:
        total_detectors = spec_info.detectorCount()
        act_beam = (sample_pos - source_pos)
        rot = _rot_from_vectors(act_beam, sc.vector(value=[0, 0, 1]))
        inv_rot = _rot_from_vectors(sc.vector(value=[0, 0, 1]), act_beam)

        pos_d = sc.Dataset()
        # Create empty arrays to hold position info for the detectors of all spectra
        pos_d["x"] = sc.zeros(dims=["detector"],
                              shape=[total_detectors],
                              unit=sc.units.m)
        pos_d["y"] = sc.zeros_like(pos_d["x"])
        pos_d["z"] = sc.zeros_like(pos_d["x"])
        pos_d.coords[spectrum_dim] = sc.array(dims=["detector"],
                                              values=np.empty(total_detectors))

        spectrum_values = pos_d.coords[spectrum_dim].values

        x_values = pos_d["x"].values
        y_values = pos_d["y"].values
        z_values = pos_d["z"].values

        idx = 0
        for i, spec in enumerate(spec_info):
            if spec.hasDetectors:
                definition = spec_info.getSpectrumDefinition(i)
                n_dets = len(definition)
                quats = []
                bboxes = []
                for j in range(n_dets):
                    det_idx = definition[j][0]
                    p = det_info.position(det_idx)
                    r = det_info.rotation(det_idx)
                    spectrum_values[idx] = i
                    x_values[idx] = p.X()
                    y_values[idx] = p.Y()
                    z_values[idx] = p.Z()
                    idx += 1
                    quats.append(
                        np.array([r.imagI(),
                                  r.imagJ(),
                                  r.imagK(),
                                  r.real()]))
                    if comp_info.hasValidShape(det_idx):
                        s = comp_info.shape(det_idx)
                        bboxes.append(s.getBoundingBox().width())
                det_rot[
                    i, :] = sc.geometry.rotation_matrix_from_quaternion_coeffs(
                        np.mean(quats, axis=0))
                det_bbox[i, :] = np.sum(bboxes, axis=0)

        rot_pos = rot * sc.geometry.position(pos_d["x"].data, pos_d["y"].data,
                                             pos_d["z"].data)

        _to_spherical(rot_pos, pos_d)

        averaged = sc.groupby(pos_d,
                              spectrum_dim,
                              bins=sc.Variable(dims=[spectrum_dim],
                                               values=np.arange(
                                                   -0.5,
                                                   len(spec_info) + 0.5,
                                                   1.0))).mean("detector")

        sign = averaged["p-sign"].data / sc.abs(averaged["p-sign"].data)
        averaged["p"] = sign * (
            (np.pi * sc.units.rad) - averaged["p-delta"].data)
        averaged["x"] = averaged["r"].data * sc.sin(
            averaged["t"].data) * sc.cos(averaged["p"].data)
        averaged["y"] = averaged["r"].data * sc.sin(
            averaged["t"].data) * sc.sin(averaged["p"].data)
        averaged["z"] = averaged["r"].data * sc.cos(averaged["t"].data)

        pos = sc.geometry.position(averaged["x"].data, averaged["y"].data,
                                   averaged["z"].data)

        return (inv_rot * pos,
                sc.spatial.linear_transforms(dims=[spectrum_dim],
                                             values=det_rot),
                sc.vectors(dims=[spectrum_dim],
                           values=det_bbox,
                           unit=sc.units.m))
    else:
        pos = np.zeros([nspec, 3])

        for i, spec in enumerate(spec_info):
            if spec.hasDetectors:
                definition = spec_info.getSpectrumDefinition(i)
                n_dets = len(definition)
                vec3s = []
                quats = []
                bboxes = []
                for j in range(n_dets):
                    det_idx = definition[j][0]
                    p = det_info.position(det_idx)
                    r = det_info.rotation(det_idx)
                    vec3s.append([p.X(), p.Y(), p.Z()])
                    quats.append(
                        np.array([r.imagI(),
                                  r.imagJ(),
                                  r.imagK(),
                                  r.real()]))
                    if comp_info.hasValidShape(det_idx):
                        s = comp_info.shape(det_idx)
                        bboxes.append(s.getBoundingBox().width())
                pos[i, :] = np.mean(vec3s, axis=0)
                det_rot[
                    i, :] = sc.geometry.rotation_matrix_from_quaternion_coeffs(
                        np.mean(quats, axis=0))
                det_bbox[i, :] = np.sum(bboxes, axis=0)
            else:
                pos[i, :] = [np.nan, np.nan, np.nan]
                det_rot[i, :] = [np.nan, np.nan, np.nan, np.nan]
                det_bbox[i, :] = [np.nan, np.nan, np.nan]
        return (sc.vectors(dims=[spectrum_dim], values=pos, unit=sc.units.m),
                sc.spatial.linear_transforms(dims=[spectrum_dim],
                                             values=det_rot),
                sc.vectors(
                    dims=[spectrum_dim],
                    values=det_bbox,
                    unit=sc.units.m,
                ))
Example #12
def test_zeros_creates_variable_with_correct_dims_and_shape():
    var = sc.zeros(dims=['x', 'y', 'z'], shape=[1, 2, 3])
    expected = sc.Variable(dims=['x', 'y', 'z'], shape=[1, 2, 3])

    comparison = var == expected
    assert comparison.values.all()
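An equivalent, stricter variant of this test could compare the two variables with sc.identical, which checks dims, shape, unit, dtype and values in a single call (a sketch, not part of the original test suite):

def test_zeros_matches_default_initialised_variable():
    # Sketch only: sc.identical compares metadata and values in one step.
    var = sc.zeros(dims=['x', 'y', 'z'], shape=[1, 2, 3])
    expected = sc.Variable(dims=['x', 'y', 'z'], shape=[1, 2, 3])
    assert sc.identical(var, expected)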