def test_sum():
    """Sum over all dims, over a single dim, and into a preallocated output."""
    data = sc.Variable(['x', 'y'], values=np.arange(4.0).reshape(2, 2))
    # Full reduction: 0 + 1 + 2 + 3 = 6.
    assert sc.is_equal(sc.sum(data), sc.Variable(value=6.0))
    expected = sc.Variable(dims=['y'], values=[2.0, 4.0])
    assert sc.is_equal(sc.sum(data, 'x'), expected)
    # In-place variant writing into a preallocated output variable.
    out = sc.Variable(dims=['y'], values=np.zeros(2), dtype=sc.dtype.float64)
    sc.sum(data, 'x', out)
    assert sc.is_equal(out, expected)
def select_bins(array, dim, start, end):
    """Return the slice of `array` whose bins along `dim` cover [start, end].

    scipp treats bins as closed on the left and open on the right:
    [left, right).
    """
    edge_coord = array.coords[dim]
    n_edges = edge_coord.shape[0]
    # Count of edges <= start locates the first bin overlapping the range.
    lo = sc.sum(sc.less_equal(edge_coord, start), dim).value - 1
    # Edges strictly greater than end bound the last overlapping bin.
    hi = n_edges - sc.sum(sc.greater(edge_coord, end), dim).value
    assert lo >= 0
    assert hi < n_edges
    return array[dim, lo:hi + 1]
def make_slices(var, dim, cutting_points):
    """Build slice objects partitioning `var` along `dim` at `cutting_points`.

    scipp treats ranges as closed on the left and open on the right:
    [left, right).
    """
    n_points = var.shape[0]
    slices = []
    for idx in range(cutting_points.shape[0] - 1):
        left = cutting_points[dim, idx]
        right = cutting_points[dim, idx + 1]
        # Values strictly below the left edge determine where the slice starts.
        begin = sc.sum(sc.less(var, left), dim).value
        # Values >= the right edge bound the slice from above.
        stop = n_points - sc.sum(sc.greater_equal(var, right), dim).value
        assert begin >= 0
        assert stop <= n_points
        slices.append(slice(begin, stop))
    return slices
def test_EventWorkspace(self):
    """Histogrammed event data must match Mantid's own Rebin result."""
    import mantid.simpleapi as mantid
    eventWS = mantid.CloneWorkspace(self.base_event_ws)
    ws = mantid.Rebin(eventWS, 10000)
    binned_mantid = mantidcompat.convert_Workspace2D_to_data_array(ws)
    # Reuse Mantid's TOF binning as the target edges for scipp's histogram.
    target_tof = binned_mantid.coords[sc.Dim.Tof]
    d = mantidcompat.convert_EventWorkspace_to_data_array(eventWS, False)
    binned = sc.histogram(d, target_tof)
    # Reduce the difference to a single scalar by summing over both dims.
    delta = sc.sum(binned_mantid - binned, sc.Dim.Spectrum)
    delta = sc.sum(delta, sc.Dim.Tof)
    self.assertLess(np.abs(delta.value), 1e-5)
def test_EventWorkspace(self):
    """scipp histogramming of events must agree with Mantid's Rebin."""
    import mantid.simpleapi as mantid
    event_ws = self.base_event_ws
    rebinned = mantid.Rebin(event_ws, 10000)
    binned_mantid = scn.mantid.convert_Workspace2D_to_data_array(rebinned)
    # Reuse Mantid's TOF binning as the edges for scipp's histogram.
    edges = binned_mantid.coords['tof']
    converted = scn.mantid.convert_EventWorkspace_to_data_array(
        event_ws, load_pulse_times=False)
    binned = sc.histogram(converted, bins=edges)
    # Collapse the difference to a scalar over both dimensions.
    residual = sc.sum(sc.sum(binned_mantid - binned, 'spectrum'), 'tof')
    self.assertLess(np.abs(residual.value), 1e-5)
def test_mean_all():
    """Masked elements must be excluded from both sum and mean."""
    var = sc.Variable(['x', 'y'], values=np.arange(4.0).reshape(2, 2))
    mask = sc.Variable(['x', 'y'],
                       values=np.array([[False, False], [True, False]]))
    da = sc.DataArray(var, masks={'m': mask})  # Add masks
    assert sc.sum(da).data.value == 0 + 1 + 3  # 2.0 masked
    # BUG FIX: the original line lacked `assert`, so the comparison was
    # evaluated and silently discarded — the mean was never actually checked.
    assert sc.mean(da).data.value == 4 / 3
def test_sum_mean():
    """Reducing over 'y' collapses data and drops 'y'-dependent coords."""
    xy = sc.Variable(dims=['x', 'y'],
                     values=np.arange(6, dtype=np.int64).reshape(2, 3))
    x = sc.Variable(dims=['x'], values=np.arange(2, dtype=np.int64))
    y = sc.Variable(dims=['y'], values=np.arange(3, dtype=np.int64))
    d = sc.Dataset({
        'a': xy,
        'b': y
    }, coords={
        'x': x,
        'y': y,
        'l1': xy,
        'l2': x
    })
    # Expected result: 'a' summed over 'y'; 'b' reduced to a scalar; only
    # coords independent of 'y' survive.
    d_ref = sc.Dataset(
        {
            'a': sc.Variable(dims=['x'],
                             values=np.array([3, 12], dtype=np.int64)),
            'b': sc.Variable(3)
        },
        coords={
            'x': x,
            'l2': x
        })
    assert sc.sum(d, 'y') == d_ref
    assert (sc.mean(d, 'y')['a'].values == [1.0, 4.0]).all()
    assert sc.mean(d, 'y')['b'].value == 1.0
def test_mdhisto_workspace_q(self):
    """An MDHistoWorkspace in Q converts with correct coords, units, data."""
    from mantid.simpleapi import (CreateMDWorkspace, FakeMDEventData, BinMD)
    md_event = CreateMDWorkspace(Dimensions=3,
                                 Extents=[-10, 10, -10, 10, -10, 10],
                                 Names='Q_x,Q_y,Q_z',
                                 Units='U,U,U',
                                 Frames='QLab,QLab,QLab',
                                 StoreInADS=False)
    FakeMDEventData(InputWorkspace=md_event,
                    PeakParams=[100000, 0, 0, 0, 1],
                    StoreInADS=False)  # Add Peak
    md_histo = BinMD(InputWorkspace=md_event,
                     AlignedDim0='Q_y,-10,10,3',
                     AlignedDim1='Q_x,-10,10,4',
                     AlignedDim2='Q_z,-10,10,5',
                     StoreInADS=False)
    histo_data_array = mantidcompat.convert_MDHistoWorkspace_to_data_array(
        md_histo)
    self.assertEqual(histo_data_array.coords[sc.Dim.Qx].values.shape, (4, ))
    self.assertEqual(histo_data_array.coords[sc.Dim.Qy].values.shape, (3, ))
    self.assertEqual(histo_data_array.coords[sc.Dim.Qz].values.shape, (5, ))
    self.assertEqual(histo_data_array.coords[sc.Dim.Qx].unit,
                     sc.units.dimensionless / sc.units.angstrom)
    self.assertEqual(histo_data_array.coords[sc.Dim.Qy].unit,
                     sc.units.dimensionless / sc.units.angstrom)
    self.assertEqual(histo_data_array.coords[sc.Dim.Qz].unit,
                     sc.units.dimensionless / sc.units.angstrom)
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual like the rest of this test.
    self.assertEqual(histo_data_array.values.shape, (3, 4, 5))
    # Sum over 2 dimensions to simplify finding max.
    max_1d = sc.sum(sc.sum(histo_data_array, dim=sc.Dim.Qy),
                    dim=sc.Dim.Qx).values
    max_index = np.argmax(max_1d)
    # Check position of max 'peak'
    self.assertEqual(np.floor(len(max_1d) / 2), max_index)
    # All events in central 'peak'
    self.assertEqual(100000, max_1d[max_index])
    self.assertTrue('nevents' in histo_data_array.attrs)
def reduce(data, q_bins):
    """Convert to Q, histogram on `q_bins`, and sum spectra.

    When a 'layer' coord is present the sum is taken per layer via groupby.
    """
    # TODO no gravity yet
    data = sc.neutron.convert(data, 'wavelength', 'Q', out=data)
    data = sc.histogram(data, q_bins)
    if 'layer' not in data.coords:
        return sc.sum(data, 'spectrum')
    return sc.groupby(data, 'layer').sum('spectrum')
def test_nansum_all():
    """nansum skips NaN elements where plain sum would propagate them."""
    ones = np.ones(10).reshape(5, 2)
    da = sc.DataArray(sc.Variable(['x', 'y'], values=ones))
    da.data.values[0, 0] = np.nan
    ds = sc.Dataset({'a': da})
    # Sanity check: an ordinary sum is poisoned by the NaN.
    assert np.isnan(sc.sum(da).data.value)
    # Nine ones remain after ignoring the NaN.
    assert sc.is_equal(sc.nansum(da).data, sc.Variable(value=9.0))
    assert sc.is_equal(sc.nansum(da), sc.nansum(ds)['a'])
def test_sum():
    """Summing over 'y' collapses that dim and preserves the unit."""
    data = sc.Variable(dims=['x', 'y'],
                       values=np.array([[0.1, 0.3], [0.2, 0.6]]),
                       unit=sc.units.m)
    expected = sc.Variable(dims=['x'],
                           values=np.array([0.4, 0.8]),
                           unit=sc.units.m)
    assert sc.is_equal(sc.sum(data, 'y'), expected)
def test_sum():
    """Summing over Dim.Y collapses that dim and preserves the unit."""
    data = sc.Variable([Dim.X, Dim.Y],
                       values=np.array([[0.1, 0.3], [0.2, 0.6]]),
                       unit=sc.units.m)
    expected = sc.Variable([Dim.X],
                           values=np.array([0.4, 0.8]),
                           unit=sc.units.m)
    assert sc.sum(data, Dim.Y) == expected
def _sum_remaining_dims(data: sc.DataArray, dim: str) -> sc.DataArray:
    """Sum all dims in `data` except `dim`."""
    result = data
    for other in data.dims:
        if other != dim:
            result = sc.sum(result, other)
    return result
def test_sum_in_place():
    """sum with `out=` fills the output variable and returns a view of it."""
    data = sc.Variable([Dim.X, Dim.Y],
                       values=np.array([[0.1, 0.3], [0.2, 0.6]]),
                       unit=sc.units.m)
    out_var = sc.Variable([Dim.X],
                          values=np.array([0.0, 0.0]),
                          unit=sc.units.m)
    expected = sc.Variable([Dim.X],
                           values=np.array([0.4, 0.8]),
                           unit=sc.units.m)
    out_view = sc.sum(data, Dim.Y, out=out_var)
    # Both the output variable and the returned view hold the result.
    assert out_var == expected
    assert out_view == expected
def test_sum_in_place():
    """sum with `out=` fills the output variable and returns a view of it."""
    data = sc.Variable(dims=['x', 'y'],
                       values=np.array([[0.1, 0.3], [0.2, 0.6]]),
                       unit=sc.units.m)
    out_var = sc.Variable(dims=['x'],
                          values=np.array([0.0, 0.0]),
                          unit=sc.units.m)
    expected = sc.Variable(dims=['x'],
                           values=np.array([0.4, 0.8]),
                           unit=sc.units.m)
    out_view = sc.sum(data, 'y', out=out_var)
    # Both the output variable and the returned view hold the result.
    assert sc.is_equal(out_var, expected)
    assert sc.is_equal(out_view, expected)
def test_EventWorkspace(self):
    """Histogram event data via scipp and diff against Mantid's Rebin."""
    # This is from the Mantid system-test data
    filename = 'CNCS_51936_event.nxs'
    eventWS = mantid.LoadEventNexus(filename)
    ws = mantid.Rebin(eventWS, -0.001, PreserveEvents=False)
    binned_mantid = mantidcompat.to_dataset(ws)
    # Reuse Mantid's TOF binning as histogram edges for scipp.
    tof = sp.Variable(binned_mantid[sp.Coord.Tof])
    d = mantidcompat.to_dataset(eventWS)
    binned = sp.histogram(d, tof)
    delta = sp.sum(binned_mantid - binned, sp.Dim.Position)
    # NOTE(review): the difference is only printed, never asserted — this
    # test cannot fail on a mismatch; consider adding a tolerance check.
    print(delta)
def test_sum_masked():
    """Masked entries must not contribute to the sum."""
    d = sc.Dataset({
        'a':
        sc.Variable(dims=['x'],
                    values=np.array([1, 5, 4, 5, 1], dtype=np.int64))
    })
    d['a'].masks['m1'] = sc.Variable(dims=['x'],
                                     values=np.array(
                                         [False, True, False, True, False]))
    # Only the unmasked values 1 + 4 + 1 remain.
    d_ref = sc.Dataset({'a': sc.Variable(np.int64(6))})
    assert sc.is_equal(sc.sum(d, 'x')['a'], d_ref['a'])
def _check_lambda_inside_resolution(lam,
                                    dlam_over_lam,
                                    data,
                                    event_mode=False,
                                    check_value=True):
    """Check whether the counts within +/- dlam/2 of `lam` sum to 1.0 counts.

    `check_value` selects the expected outcome of that closeness test, so the
    helper can assert both presence (True) and absence (False) of the counts.
    """
    half_width = 0.5 * dlam_over_lam * lam
    lo = lam - half_width
    hi = lam + half_width
    if event_mode:
        # Bin the events into a single [lo, hi) wavelength bin and sum it.
        window = sc.array(dims=['wavelength'],
                          values=[lo.value, hi.value],
                          unit=lam.unit)
        sum_in_range = sc.bin(data,
                              edges=[window]).bins.sum().data['wavelength', 0]
    else:
        # Dense data: label-based slicing selects the wavelength window.
        sum_in_range = sc.sum(data['wavelength', lo:hi]).data
    assert sc.isclose(sum_in_range, 1.0 * sc.units.counts).value is check_value
def test_sum_masked():
    """Dataset-level masks must exclude entries from the sum."""
    values = sc.Variable(dims=['x'],
                         values=np.array([1, 5, 4, 5, 1], dtype=np.int64))
    mask = sc.Variable(dims=['x'],
                       values=np.array([False, True, False, True, False]))
    d = sc.Dataset({'a': values}, masks={'m1': mask})
    # Only the unmasked values 1 + 4 + 1 remain.
    d_ref = sc.Dataset({'a': sc.Variable(np.int64(6))})
    assert sc.sum(d, 'x')['a'] == d_ref['a']
def test_advanced_geometry_with_absent_shape(self):
    """Loading an instrument whose shape info was purged yields zero-size
    shapes in the converted data array."""
    import mantid.simpleapi as mantid
    # single bank 3 by 3
    ws = mantid.CreateSampleWorkspace(NumBanks=1,
                                      BankPixelWidth=3,
                                      StoreInADS=False)
    # Save and reload trick to purge sample shape info
    file_name = "example_geometry.nxs"
    geom_path = os.path.join(tempfile.gettempdir(), file_name)
    mantid.SaveNexusGeometry(ws, geom_path)  # Does not save shape info
    assert os.path.isfile(geom_path)  # sanity check
    # ROBUSTNESS FIX: remove the temp file even if the reload raises, so a
    # failing run does not leave a stale file that breaks reruns.
    try:
        out = mantid.LoadEmptyInstrument(
            Filename=geom_path, StoreInADS=False)  # reload without sample info
    finally:
        os.remove(geom_path)
    assert not out.componentInfo().hasValidShape(0)  # sanity check
    da = scn.mantid.from_mantid(out, advanced_geometry=True)
    # Shapes have zero size
    assert sc.identical(sc.sum(da.meta['shape']),
                        sc.vector(value=[0, 0, 0], unit=sc.units.m))
def test_unit_conversion(self):
    """Compare scipp's TOF->DeltaE conversion against Mantid's ConvertUnits."""
    # This is from the Mantid system-test data
    filename = 'CNCS_51936_event.nxs'
    eventWS = mantid.LoadEventNexus(filename)
    ws = mantid.Rebin(eventWS, -0.001, PreserveEvents=False)
    tmp = mantidcompat.to_dataset(ws)
    # Capture the TOF binning before Mantid converts the workspace units.
    tof = sp.Variable(tmp[sp.Coord.Tof])
    ws = mantid.ConvertUnits(InputWorkspace=ws,
                             Target='DeltaE',
                             EMode='Direct',
                             EFixed=3.3056)
    converted_mantid = mantidcompat.to_dataset(ws)
    converted_mantid[sp.Coord.Ei] = ([], 3.3059)
    d = mantidcompat.to_dataset(eventWS, drop_pulse_times=True)
    d[sp.Coord.Ei] = ([], 3.3059)
    # Histogram the events on the captured TOF grid, then drop raw events
    # so only the histogrammed data is converted.
    d.merge(sp.histogram(d, tof))
    del(d[sp.Data.Events])
    converted = sp.convert(d, sp.Dim.Tof, sp.Dim.DeltaE)
    delta = sp.sum(converted_mantid - converted, sp.Dim.Position)
    # NOTE(review): the difference is only printed, never asserted — this
    # test cannot fail on a mismatch; consider adding a tolerance check.
    print(delta)
def test_sum_mean():
    """Sum of five ones is 5; mean of 0..5 is 2.5."""
    ones = sp.Variable([Dim.X], values=np.ones(5).astype(np.int64))
    assert sp.sum(ones, Dim.X) == sp.Variable(5)
    ramp = sp.Variable([Dim.X], values=np.arange(6).astype(np.int64))
    # Integer input, floating-point mean.
    assert sp.mean(ramp, Dim.X) == sp.Variable(2.5)
def test_sum_mean():
    """Sum of 0..4 is 10; mean of 0..5 is 2.5."""
    ramp5 = sc.Variable([Dim.X], values=np.arange(5, dtype=np.int64))
    assert sc.sum(ramp5, Dim.X) == sc.Variable(10)
    ramp6 = sc.Variable([Dim.X], values=np.arange(6, dtype=np.int64))
    # Integer input, floating-point mean.
    assert sc.mean(ramp6, Dim.X) == sc.Variable(2.5)
def test_sum_mean():
    """Sum of 0..4 is 10; mean of 0..5 is 2.5."""
    ramp5 = sc.Variable(dims=['x'], values=np.arange(5, dtype=np.int64))
    assert sc.is_equal(sc.sum(ramp5, 'x'), sc.Variable(10))
    ramp6 = sc.Variable(dims=['x'], values=np.arange(6, dtype=np.int64))
    # Integer input, floating-point mean.
    assert sc.is_equal(sc.mean(ramp6, 'x'), sc.Variable(2.5))
def simple_reducer(*, dim):
    """Return a reducer callable that sums its argument over `dim`."""
    def _reduce(x):
        return sc.sum(x, dim=dim)

    return _reduce
def test_sum_all():
    """Summing all dims of a DataArray matches the Dataset-level result."""
    da = sc.DataArray(
        sc.Variable(['x', 'y'], values=np.ones(10).reshape(5, 2)))
    ds = sc.Dataset({'a': da})
    # Ten ones sum to 10.
    assert sc.is_equal(sc.sum(da).data, sc.Variable(value=10.0))
    assert sc.is_equal(sc.sum(da), sc.sum(ds)['a'])