Example #1
    def test_unit_conversion(self):
        import mantid.simpleapi as mantid
        eventWS = mantid.CloneWorkspace(self.base_event_ws)
        ws = mantid.Rebin(eventWS, 10000, PreserveEvents=False)
        tmp = mantidcompat.convert_Workspace2D_to_data_array(ws)
        target_tof = tmp.coords['tof']
        ws = mantid.ConvertUnits(InputWorkspace=ws,
                                 Target="Wavelength",
                                 EMode="Elastic")
        converted_mantid = mantidcompat.convert_Workspace2D_to_data_array(ws)

        da = mantidcompat.convert_EventWorkspace_to_data_array(
            eventWS, load_pulse_times=False)
        da.realign({'tof': target_tof})
        da = sc.histogram(da)
        d = sc.Dataset(da)
        converted = sc.neutron.convert(d, 'tof', 'wavelength')

        self.assertTrue(
            np.all(np.isclose(converted_mantid.values, converted[""].values)))
        self.assertTrue(
            np.all(
                np.isclose(
                    converted_mantid.coords['wavelength'].values,
                    converted.coords['wavelength'].values,
                )))
Example #2
    def test_unit_conversion(self):
        import mantid.simpleapi as mantid
        eventWS = self.base_event_ws
        ws = mantid.Rebin(eventWS, 10000, PreserveEvents=False)
        tmp = scn.mantid.convert_Workspace2D_to_data_array(ws)
        target_tof = tmp.coords['tof']
        ws = mantid.ConvertUnits(InputWorkspace=ws,
                                 Target="Wavelength",
                                 EMode="Elastic")
        converted_mantid = scn.mantid.convert_Workspace2D_to_data_array(ws)

        da = scn.mantid.convert_EventWorkspace_to_data_array(
            eventWS, load_pulse_times=False)
        da = sc.histogram(da, bins=target_tof)
        d = sc.Dataset(data={da.name: da})
        converted = scn.convert(d, 'tof', 'wavelength', scatter=True)

        self.assertTrue(
            np.all(np.isclose(converted_mantid.values, converted[""].values)))
        self.assertTrue(
            np.all(
                np.isclose(
                    converted_mantid.coords['wavelength'].values,
                    converted.coords['wavelength'].values,
                )))
Example #3
    def test_unit_conversion(self):
        import mantid.simpleapi as mantid
        eventWS = mantid.CloneWorkspace(self.base_event_ws)
        ws = mantid.Rebin(eventWS, 10000, PreserveEvents=False)
        tmp = mantidcompat.convert_Workspace2D_to_dataarray(ws)
        target_tof = tmp.coords[sc.Dim.Tof]
        ws = mantid.ConvertUnits(InputWorkspace=ws,
                                 Target="Wavelength",
                                 EMode="Elastic")
        converted_mantid = mantidcompat.convert_Workspace2D_to_dataarray(ws)

        da = mantidcompat.convertEventWorkspace_to_dataarray(
            eventWS, False)
        da = sc.histogram(da, target_tof)
        d = sc.Dataset(da)
        converted = sc.neutron.convert(d, sc.Dim.Tof, sc.Dim.Wavelength)

        self.assertTrue(
            np.all(np.isclose(converted_mantid.values, converted[""].values)))
        self.assertTrue(
            np.all(
                np.isclose(
                    converted_mantid.coords[sc.Dim.Wavelength].values,
                    converted.coords[sc.Dim.Wavelength].values,
                )))
Example #4
def test_dataset_histogram():
    var = sc.Variable(dims=['x'], shape=[2], dtype=sc.dtype.event_list_float64)
    var['x', 0].values = np.arange(3)
    var['x', 0].values.append(42)
    var['x', 0].values.extend(np.ones(3))
    var['x', 1].values = np.ones(6)
    ds = sc.Dataset()
    s = sc.DataArray(data=sc.Variable(dims=['x'],
                                      values=np.ones(2),
                                      variances=np.ones(2)),
                     coords={'y': var})
    s1 = sc.DataArray(data=sc.Variable(dims=['x'],
                                       values=np.ones(2),
                                       variances=np.ones(2)),
                      coords={'y': var * 5.0})
    realign_coords = {
        'y': sc.Variable(values=np.arange(5, dtype=np.float64), dims=['y'])
    }
    ds['s'] = sc.realign(s, realign_coords)
    ds['s1'] = sc.realign(s1, realign_coords)
    h = sc.histogram(ds)
    assert np.array_equal(
        h['s'].values, np.array([[1.0, 4.0, 1.0, 0.0], [0.0, 6.0, 0.0, 0.0]]))
    assert np.array_equal(
        h['s1'].values, np.array([[1.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]]))
Example #5
def reduce(data, q_bins):
    data = sc.neutron.convert(data, 'wavelength', 'Q',
                              out=data)  # TODO no gravity yet
    data = sc.histogram(data, q_bins)
    if 'layer' in data.coords:
        return sc.groupby(data, 'layer').sum('spectrum')
    else:
        return sc.sum(data, 'spectrum')
Example #6
def test_realign():
    base = make_dataarray()
    y = sc.Variable(dims=['y'], values=[0.0, 4.0])
    realigned = sc.realign(base, coords={'y': y})
    assert realigned.data is None
    assert sc.is_equal(realigned.unaligned, base)
    expected = sc.DataArray(data=sc.Variable(dims=['y'], values=[4.0]),
                            coords={'y': y})
    assert sc.is_equal(sc.histogram(realigned), expected)
Example #7
def reduce_to_q(data, *, q_bins, reducer, wavelength_bands=None):
    """
    Example:
    >>> reduced = reduce_to_q(data, q_bins=q_bins, reducer=simple_reducer('spectrum'))  # noqa: E501
    """
    # TODO Backup of the coord is necessary until `convert` can keep original
    wavelength = data.coords['wavelength']
    data = scn.convert(data, 'wavelength', 'Q', scatter=True)
    if wavelength_bands is None:
        data = sc.histogram(data, q_bins)
        return reducer(data)
    data.coords['wavelength'] = wavelength
    bands = None
    for i in range(wavelength_bands.sizes['wavelength'] - 1):
        low = wavelength_bands['wavelength', i]
        high = wavelength_bands['wavelength', i + 1]
        band = sc.histogram(data['wavelength', low:high], q_bins)
        band = reducer(band)
        bands = sc.concat([bands, band],
                          'wavelength') if bands is not None else band
    bands.coords['wavelength'] = wavelength_bands
    return bands
Example #8
    def test_EventWorkspace(self):
        import mantid.simpleapi as mantid
        eventWS = mantid.CloneWorkspace(self.base_event_ws)
        ws = mantid.Rebin(eventWS, 10000)

        binned_mantid = mantidcompat.convert_Workspace2D_to_data_array(ws)

        target_tof = binned_mantid.coords[sc.Dim.Tof]
        d = mantidcompat.convert_EventWorkspace_to_data_array(eventWS, False)
        binned = sc.histogram(d, target_tof)

        delta = sc.sum(binned_mantid - binned, sc.Dim.Spectrum)
        delta = sc.sum(delta, sc.Dim.Tof)
        self.assertLess(np.abs(delta.value), 1e-5)
Example #9
def reduce_by_wavelength(data, q_bins, groupby, wavelength_bands):
    slices = contrib.make_slices(
        contrib.midpoints(data.coords['wavelength'], 'wavelength'),
        'wavelength', wavelength_bands)
    data = sc.neutron.convert(data, 'wavelength', 'Q',
                              out=data)  # TODO no gravity yet
    bands = None
    for s in slices:
        band = sc.histogram(data['Q', s], q_bins)
        band = sc.groupby(band, group=groupby).sum('spectrum')
        bands = sc.concatenate(bands, band,
                               'wavelength') if bands is not None else band
    bands.coords['wavelength'] = wavelength_bands
    return bands
Example #10
    def test_EventWorkspace(self):
        # This is from the Mantid system-test data
        filename = 'CNCS_51936_event.nxs'
        eventWS = mantid.LoadEventNexus(filename)
        ws = mantid.Rebin(eventWS, -0.001, PreserveEvents=False)

        binned_mantid = mantidcompat.to_dataset(ws)

        tof = sp.Variable(binned_mantid[sp.Coord.Tof])
        d = mantidcompat.to_dataset(eventWS)
        binned = sp.histogram(d, tof)

        delta = sp.sum(binned_mantid - binned, sp.Dim.Position)
        print(delta)
Example #11
    def test_EventWorkspace(self):
        import mantid.simpleapi as mantid
        eventWS = self.base_event_ws
        ws = mantid.Rebin(eventWS, 10000)

        binned_mantid = scn.mantid.convert_Workspace2D_to_data_array(ws)

        target_tof = binned_mantid.coords['tof']
        d = scn.mantid.convert_EventWorkspace_to_data_array(
            eventWS, load_pulse_times=False)
        binned = sc.histogram(d, bins=target_tof)

        delta = sc.sum(binned_mantid - binned, 'spectrum')
        delta = sc.sum(delta, 'tof')
        self.assertLess(np.abs(delta.value), 1e-5)
Example #12
def test_variable_histogram():
    var = sc.Variable(dims=['x'], shape=[2], dtype=sc.dtype.event_list_float64)
    var['x', 0].values = np.arange(3)
    var['x', 0].values.append(42)
    var['x', 0].values.extend(np.ones(3))
    var['x', 1].values = np.ones(6)
    ds = sc.Dataset()
    ds['events'] = sc.DataArray(data=sc.Variable(dims=['x'],
                                                 values=np.ones(2),
                                                 variances=np.ones(2)),
                                coords={'y': var})
    hist = sc.histogram(
        ds['events'],
        sc.Variable(values=np.arange(5, dtype=np.float64), dims=['y']))
    assert np.array_equal(
        hist.values, np.array([[1.0, 4.0, 1.0, 0.0], [0.0, 6.0, 0.0, 0.0]]))
Example #13
def test_realign():
    co = sc.Variable(['x'], shape=[1], dtype=sc.dtype.event_list_float64)
    co.values[0].append(1.0)
    co.values[0].append(2.0)
    co.values[0].append(2.0)
    data = sc.Variable(['y'],
                       dtype=sc.dtype.float64,
                       values=np.array([1]),
                       variances=np.array([1]))
    da = sc.DataArray(data=data, coords={'x': co})
    assert not da.unaligned
    da_r = sc.realign(
        da, {'x': sc.Variable(['x'], values=np.array([0.0, 1.0, 3.0]))})
    assert da_r.shape == [1, 2]
    assert da_r.unaligned == da
    assert not da_r.data
    assert np.allclose(sc.histogram(da_r).values, np.array([0, 3]), atol=1e-9)
    da.realign({'x': sc.Variable(['x'], values=np.array([0.0, 1.0, 3.0]))})
    assert da.shape == [1, 2]
Example #14
def test_histogram_and_setitem():
    var = sc.Variable(dims=['x'],
                      shape=[2],
                      dtype=sc.dtype.event_list_float64,
                      unit=sc.units.us)
    var['x', 0].values = np.arange(3)
    var['x', 0].values.append(42)
    var['x', 0].values.extend(np.ones(3))
    var['x', 1].values = np.ones(6)
    ds = sc.Dataset()
    ds['s'] = sc.DataArray(data=sc.Variable(dims=['x'],
                                            values=np.ones(2),
                                            variances=np.ones(2)),
                           coords={'tof': var})
    assert 'tof' in ds.coords
    assert 'tof' in ds['s'].coords
    edges = sc.Variable(dims=['tof'], values=np.arange(5.0), unit=sc.units.us)
    h = sc.histogram(ds['s'], edges)
    assert np.array_equal(
        h.values, np.array([[1.0, 4.0, 1.0, 0.0], [0.0, 6.0, 0.0, 0.0]]))
    assert 'tof' in ds.coords
Example #15
    def test_unit_conversion(self):
        # This is from the Mantid system-test data
        filename = 'CNCS_51936_event.nxs'
        eventWS = mantid.LoadEventNexus(filename)
        ws = mantid.Rebin(eventWS, -0.001, PreserveEvents=False)
        tmp = mantidcompat.to_dataset(ws)
        tof = sp.Variable(tmp[sp.Coord.Tof])
        ws = mantid.ConvertUnits(InputWorkspace=ws, Target='DeltaE',
                                 EMode='Direct', EFixed=3.3056)

        converted_mantid = mantidcompat.to_dataset(ws)
        converted_mantid[sp.Coord.Ei] = ([], 3.3059)

        d = mantidcompat.to_dataset(eventWS, drop_pulse_times=True)
        d[sp.Coord.Ei] = ([], 3.3059)
        d.merge(sp.histogram(d, tof))
        del d[sp.Data.Events]
        converted = sp.convert(d, sp.Dim.Tof, sp.Dim.DeltaE)

        delta = sp.sum(converted_mantid - converted, sp.Dim.Position)
        print(delta)
Example #16
        sc.Variable(
            make_component_info(source_pos=[0, 0, -20], sample_pos=[0, 0, 0]))
    })

# Add sparse TOF coord, i.e., the equivalent to event-TOF in Mantid
tofs = sc.Variable(dims=[Dim.Position, Dim.Tof],
                   shape=[n_pixel, sc.Dimensions.Sparse],
                   unit=sc.units.us)
d['sample'] = sc.DataArray(coords={Dim.Tof: tofs})

# Set some positions
d.coords[Dim.Position].values[0] = [1, 2, 3]
print(d.coords[Dim.Position].values[0])

# Add some events
# Note: d.coords[Dim.Tof] gives the "dense" TOF coord, not the event-TOFs
d['sample'].coords[Dim.Tof][Dim.Position, 0].values = np.arange(10)
# The following should be equivalent but does not work yet, see scipp/#290
# d['sample'].coords[Dim.Tof].values[1] = np.arange(10)
print(d)

dspacing = sc.neutron.convert(d, Dim.Tof, Dim.DSpacing)
print(dspacing)

# Converting event data to histogram
hist = sc.histogram(dspacing, dspacing.coords[Dim.DSpacing])
print(hist)

# "DiffractionFocussing" == sum? (not available yet)
# focussed = sc.sum(hist, Dim.Position)
Example #17
def _do_stitching_on_beamline(wavelengths, dim, event_mode=False):
    # Make beamline parameters for 6 frames
    coords = wfm.make_fake_beamline(nframes=6)

    # They are all created half-way through the pulse.
    # Compute their arrival time at the detector.
    alpha = sc.to_unit(constants.m_n / constants.h, 's/m/angstrom')
    dz = sc.norm(coords['position'] - coords['source_position'])
    arrival_times = sc.to_unit(alpha * dz * wavelengths,
                               'us') + coords['source_pulse_t_0'] + (
                                   0.5 * coords['source_pulse_length'])
    coords[dim] = arrival_times

    # Make a data array that contains the beamline and the time coordinate
    tmin = sc.min(arrival_times)
    tmax = sc.max(arrival_times)
    dt = 0.1 * (tmax - tmin)

    if event_mode:
        num = 2
    else:
        num = 2001
    time_binning = sc.linspace(dim=dim,
                               start=(tmin - dt).value,
                               stop=(tmax + dt).value,
                               num=num,
                               unit=dt.unit)
    events = sc.DataArray(data=sc.ones(dims=['event'],
                                       shape=arrival_times.shape,
                                       unit=sc.units.counts,
                                       with_variances=True),
                          coords=coords)
    if event_mode:
        da = sc.bin(events, edges=[time_binning])
    else:
        da = sc.histogram(events, bins=time_binning)

    # Find location of frames
    frames = wfm.get_frames(da)

    stitched = wfm.stitch(frames=frames, data=da, dim=dim, bins=2001)

    wav = scn.convert(stitched,
                      origin='tof',
                      target='wavelength',
                      scatter=False)
    if event_mode:
        out = wav
    else:
        out = sc.rebin(wav,
                       dim='wavelength',
                       bins=sc.linspace(dim='wavelength',
                                        start=1.0,
                                        stop=10.0,
                                        num=1001,
                                        unit='angstrom'))

    choppers = {key: da.meta[key].value for key in ch.find_chopper_keys(da)}
    # Distance between WFM choppers
    dz_wfm = sc.norm(choppers["chopper_wfm_2"]["position"].data -
                     choppers["chopper_wfm_1"]["position"].data)
    # Delta_lambda  / lambda
    dlambda_over_lambda = dz_wfm / sc.norm(
        coords['position'] - frames['wfm_chopper_mid_point'].data)

    return out, dlambda_over_lambda
Example #18
def powder_reduction(sample='sample.nxs',
                     calibration=None,
                     vanadium=None,
                     empty_instr=None,
                     lambda_binning=(0.7, 10.35, 5615),
                     **absorp):
    """
    Simple WISH reduction workflow

    Note
    ----

    The sample data were not recorded using the same layout
    of WISH as the Vanadium and empty instrument. That's why:
    - loading calibration for Vanadium used a different IDF
    - the Vanadium correction involved cropping the sample data
      to the first 5 groups (panels)
    ----

    Corrections applied:
    - Vanadium correction
    - Absorption correction
    - Normalization by monitors
    - Conversion considering calibration
    - Masking and grouping detectors into panels

    Parameters
    ----------
    sample: Nexus event file

    calibration: .cal file following Mantid's standards
        The columns correspond to detector IDs, offsets, selection flags
        and groups

    vanadium: Nexus event file

    empty_instr: Nexus event file

    lambda_binning: min, max and number of steps for binning in wavelength
                    min and max are in Angstroms

    **absorp: dictionary containing information to correct absorption for sample
              and Vanadium.
              At most two entries relate to the Vanadium correction: the radius
              and height of the cylindrical sample shape. To distinguish them from
              the sample inputs, their keys are 'CylinderVanadiumRadius' and
              'CylinderVanadiumHeight'. All other keys follow Mantid's syntax and
              apply to the sample data only.
              See the help of Mantid's CylinderAbsorption algorithm for details:
              https://docs.mantidproject.org/nightly/algorithms/CylinderAbsorption-v1.html
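              A hypothetical example of such a dictionary (the values are purely
              illustrative; 'CylinderSampleHeight' and 'CylinderSampleRadius' are
              taken from CylinderAbsorption's documented properties and are not
              names defined in this workflow):
              absorp = {'CylinderSampleHeight': 4.0,
                        'CylinderSampleRadius': 0.4,
                        'CylinderVanadiumRadius': 0.5,
                        'CylinderVanadiumHeight': 5.0}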

    Returns
    -------
    Scipp dataset containing reduced data in d-spacing

    Hints
    -----

    To plot the output data, one can histogram it in d-spacing and sum over the
    groups, using sc.histogram and sc.sum, respectively (see the sketch after
    this example).

    """
    # Load counts
    sample_data = sc.neutron.load(sample,
                                  advanced_geometry=True,
                                  load_pulse_times=False,
                                  mantid_args={'LoadMonitors': True})

    # Load calibration
    if calibration is not None:
        input_load_cal = {"InstrumentName": "WISH"}
        cal = load_calibration(calibration, mantid_args=input_load_cal)
        # Merge table with detector->spectrum mapping from sample
        # (implicitly checking that detectors between sample and calibration are the same)
        cal_sample = sc.merge(cal, sample_data.coords['detector_info'].value)
        # Compute spectrum mask from detector mask
        mask = sc.groupby(cal_sample['mask'], group='spectrum').any('detector')

        # Compute spectrum groups from detector groups
        g = sc.groupby(cal_sample['group'], group='spectrum')

        group = g.min('detector')

        assert group == g.max('detector'), \
            "Calibration table has mismatching group for detectors in same spectrum"

        sample_data.coords['group'] = group.data
        sample_data.masks['mask'] = mask.data

    # Correct 4th monitor spectrum
    # There are 5 monitors for WISH. Only one, the fourth one, is selected for
    # correction (like in the real WISH workflow).

    # Select fourth monitor and convert from tof to wavelength
    mon4_lambda = sc.neutron.convert(sample_data.attrs['monitor4'].values,
                                     'tof', 'wavelength')

    # Spline background
    mon4_spline_background = bspline_background(mon4_lambda,
                                                sc.Dim('wavelength'),
                                                smoothing_factor=70)

    # Smooth monitor
    mon4_smooth = smooth_data(mon4_spline_background,
                              dim='wavelength',
                              NPoints=40)
    # Delete intermediate data
    del mon4_lambda, mon4_spline_background

    # Correct data
    # 1. Normalize to monitor
    # Convert to wavelength (counts)
    sample_lambda = sc.neutron.convert(sample_data, 'tof', 'wavelength')

    # Rebin monitors' data
    lambda_min, lambda_max, number_bins = lambda_binning

    edges_lambda = sc.Variable(['wavelength'],
                               unit=sc.units.angstrom,
                               values=np.linspace(lambda_min,
                                                  lambda_max,
                                                  num=number_bins))
    mon_rebin = sc.rebin(mon4_smooth, 'wavelength', edges_lambda)

    # Realign sample data
    sample_lambda.realign({'wavelength': edges_lambda})
    sample_lambda /= mon_rebin

    del mon_rebin, mon4_smooth

    # 2. absorption correction
    if bool(absorp):
        # Copy dictionary of absorption parameters
        absorp_sample = absorp.copy()
        # Remove input related to Vanadium if present in absorp dictionary
        found_vana_info = [
            key for key in absorp_sample.keys() if 'Vanadium' in key
        ]

        for item in found_vana_info:
            absorp_sample.pop(item, None)

        # Calculate absorption correction for sample data
        correction = absorption_correction(sample, lambda_binning,
                                           **absorp_sample)

        # The following three lines remove the source, sample and detector
        # position info from the correction DataArray so that its coordinates
        # do not conflict with those of the sample data during normalization

        del correction.coords['source_position']
        del correction.coords['sample_position']
        del correction.coords['position']

        correction_rebin = sc.rebin(correction, 'wavelength', edges_lambda)

        del correction

        sample_lambda /= correction_rebin

    del sample_data

    sample_tof = sc.neutron.convert(sample_lambda,
                                    'wavelength',
                                    'tof',
                                    realign='linear')

    del sample_lambda

    # 3. Convert to d-spacing taking calibration into account
    # Has to use the standard conversion in all cases until support for
    # convert_with_calibration with realign='linear' is implemented
    sample_dspacing = sc.neutron.convert(sample_tof,
                                         'tof',
                                         'd-spacing',
                                         realign='linear')
    if calibration is not None:
        del cal_sample

    # if calibration is None:
    #     # No calibration data, use standard convert algorithm
    #     sample_dspacing = sc.neutron.convert(sample_tof, 'tof', 'd-spacing', realign='linear')
    #
    # else:
    #     # Calculate dspacing from calibration file
    #     sample_dspacing = sc.neutron.diffraction.convert_with_calibration(sample_tof, cal_sample)
    #     del cal_sample

    # 4. Focus panels
    # Assuming sample is in d-spacing: Focus into groups
    focused = sc.groupby(sample_dspacing, group='group').sum('spectrum')

    del sample_dspacing

    # 5. Vanadium correction (requires Vanadium and Empty instrument)
    if vanadium is not None and empty_instr is not None:
        print("Proceed with reduction of Vanadium data ")

        vana_red_focused = process_vanadium_data(vanadium, empty_instr,
                                                 lambda_binning, calibration,
                                                 **absorp)

        # The following selection of groups depends on the loaded data for
        # Sample, Vanadium and Empty instrument
        focused = focused['group', 0:5].copy()

        # histogram vanadium for normalizing + cleaning 'metadata'
        vana_histo = sc.histogram(vana_red_focused)
        del vana_red_focused
        vana_histo.coords['detector_info'] = focused.coords[
            'detector_info'].copy()
        del vana_histo.coords['source_position']
        del vana_histo.coords['sample_position']

        # normalize by vanadium
        focused /= vana_histo

        del vana_histo

    return focused
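
Following the Hints section in the docstring above, a minimal usage sketch of how
the reduced output might be histogrammed and summed for plotting. The file names
are placeholders, and the calls mirror the sc.histogram / sc.sum usage of the
examples above rather than prescribing the library's exact API:

# Hypothetical file names; lambda_binning matches the function's default
reduced = powder_reduction(sample='sample.nxs',
                           calibration='calibration.cal',
                           vanadium='vanadium.nxs',
                           empty_instr='empty.nxs',
                           lambda_binning=(0.7, 10.35, 5615))
# Histogram onto the realigned d-spacing binning (no explicit edges, as in the
# vanadium histogramming step inside the workflow)
hist = sc.histogram(reduced)
# Sum over the panel groups to obtain a single curve for plotting
summed = sc.sum(hist, 'group')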