def test_temperature_log_is_time_series(self):
    """Run ComputeCalibrationCoefVan on a workspace whose 'temperature' log
    is a time series and check the resulting Debye-Waller factor."""
    out_ws_name = "output_ws"
    EditInstrumentGeometry(self._input_ws, L2="4,8", Polar="0,15",
                           Azimuthal="0,0", DetectorIDs="1,2")
    # Three samples with the same value, so the averaged temperature is 0.0
    for timestamp in ('2010-09-14T04:20:12',
                      '2010-09-14T04:20:13',
                      '2010-09-14T04:20:14'):
        AddTimeSeriesLog(self._input_ws, 'temperature', timestamp, Value='0.0')
    alg_test = run_algorithm("ComputeCalibrationCoefVan",
                             VanadiumWorkspace=self._input_ws,
                             EPPTable=self._table,
                             OutputWorkspace=out_ws_name)
    self.assertTrue(alg_test.isExecuted())
    wsoutput = AnalysisDataService.retrieve(out_ws_name)
    self._checkDWF(wsoutput, 0.0)
def test_generate_plot_command_returns_correct_string_for_sample_log(self):
    """Plotting a filtered sample log should round-trip through
    generate_plot_command to the equivalent plot(...) call string."""
    kwargs = copy(LINE2D_KWARGS)
    kwargs["drawstyle"] = 'steps-post'
    kwargs["LogName"] = "my_log"
    kwargs["ExperimentInfo"] = 0
    kwargs["Filtered"] = True
    # add a log for the plot to reference
    for log_time, log_value in (("2010-01-01T00:00:00", 100),
                                ("2010-01-01T00:30:00", 15),
                                ("2010-01-01T00:50:00", 100.2)):
        AddTimeSeriesLog(self.test_ws, Name="my_log", Time=log_time,
                         Value=log_value)
    line = self.ax.plot(self.test_ws, **kwargs)[0]
    output = generate_plot_command(line)
    expected_command = "plot({}, {})".format(
        self.test_ws.name(), convert_args_to_string(None, kwargs))
    self.assertEqual(expected_command, output)
def setUpClass(cls):
    # Save the current autodistribution setting so it can be restored later
    # (presumably in tearDownClass — not visible in this chunk).
    cls.g1da = config['graph1d.autodistribution']
    config['graph1d.autodistribution'] = 'On'
    # 2-spectrum histogram workspace stored as a distribution.
    cls.ws2d_histo = CreateWorkspace(DataX=[10, 20, 30, 10, 20, 30],
                                     DataY=[2, 3, 4, 5],
                                     DataE=[1, 2, 3, 4],
                                     NSpec=2,
                                     Distribution=True,
                                     YUnitLabel="Counts per $\\AA$",
                                     UnitX='Wavelength',
                                     VerticalAxisUnit='DeltaE',
                                     VerticalAxisValues=[4, 6, 8],
                                     OutputWorkspace='ws2d_histo')
    # Same data, but flagged as a non-distribution (raw counts).
    cls.ws2d_histo_non_dist = CreateWorkspace(DataX=[10, 20, 30, 10, 20, 30],
                                              DataY=[2, 3, 4, 5],
                                              DataE=[1, 2, 3, 4],
                                              NSpec=2,
                                              Distribution=False,
                                              YUnitLabel='Counts',
                                              UnitX='Wavelength',
                                              OutputWorkspace='ws2d_histo_non_dist')
    # Ragged histogram: the two spectra have different X binning.
    cls.ws2d_histo_rag = CreateWorkspace(DataX=[1, 2, 3, 4, 5, 2, 4, 6, 8, 10],
                                         DataY=[2] * 8,
                                         NSpec=2,
                                         VerticalAxisUnit='DeltaE',
                                         VerticalAxisValues=[5, 7, 9],
                                         OutputWorkspace='ws2d_histo_rag')
    # 3D MD histo workspace with a 5x5x1 grid (effectively 2D).
    cls.ws_MD_2d = CreateMDHistoWorkspace(Dimensionality=3,
                                          Extents='-3,3,-10,10,-1,1',
                                          SignalInput=range(25),
                                          ErrorInput=range(25),
                                          NumberOfEvents=10 * np.ones(25),
                                          NumberOfBins='5,5,1',
                                          Names='Dim1,Dim2,Dim3',
                                          Units='MomentumTransfer,EnergyTransfer,Angstrom',
                                          OutputWorkspace='ws_MD_2d')
    # Same extents but a 1x5x1 grid (effectively 1D).
    cls.ws_MD_1d = CreateMDHistoWorkspace(Dimensionality=3,
                                          Extents='-3,3,-10,10,-1,1',
                                          SignalInput=range(5),
                                          ErrorInput=range(5),
                                          NumberOfEvents=10 * np.ones(5),
                                          NumberOfBins='1,5,1',
                                          Names='Dim1,Dim2,Dim3',
                                          Units='MomentumTransfer,EnergyTransfer,Angstrom',
                                          OutputWorkspace='ws_MD_1d')
    # Point-data workspace with uneven spectrum lengths: conjoin a second
    # spectrum with a different number of points, then re-fetch from the ADS
    # because ConjoinWorkspaces replaces the stored workspace.
    cls.ws2d_point_uneven = CreateWorkspace(DataX=[10, 20, 30],
                                            DataY=[1, 2, 3],
                                            NSpec=1,
                                            OutputWorkspace='ws2d_point_uneven')
    wp = CreateWorkspace(DataX=[15, 25, 35, 45], DataY=[1, 2, 3, 4], NSpec=1)
    ConjoinWorkspaces(cls.ws2d_point_uneven, wp, CheckOverlapping=False)
    cls.ws2d_point_uneven = mantid.mtd['ws2d_point_uneven']
    cls.ws2d_histo_uneven = CreateWorkspace(DataX=[10, 20, 30, 40],
                                            DataY=[1, 2, 3],
                                            NSpec=1,
                                            OutputWorkspace='ws2d_histo_uneven')
    # Time-series log used by the sample-log plotting tests.
    AddTimeSeriesLog(cls.ws2d_histo, Name="my_log",
                     Time="2010-01-01T00:00:00", Value=100)
    AddTimeSeriesLog(cls.ws2d_histo, Name="my_log",
                     Time="2010-01-01T00:30:00", Value=15)
    AddTimeSeriesLog(cls.ws2d_histo, Name="my_log",
                     Time="2010-01-01T00:50:00", Value=100.2)
def test_create_results_table_with_logs_selected(self):
    """The results table should carry averaged log values (with errors for
    time-series logs) alongside the fit parameters."""
    workspace = CreateWorkspace([0, 1, 2, 3, 4, 5], [0, 1, 2, 3, 4, 5])
    workspace.mutableRun().addProperty(
        "run_start", "1970-01-01T00:00:01 to 1970-01-01T00:00:01", True)
    # Time-series temperature log: its table entry is the average with a
    # non-zero error.
    for log_time, log_value in (("2010-01-01T00:00:00", 100),
                                ("2010-01-01T00:30:00", 65),
                                ("2010-01-01T00:50:00", 100.2)):
        AddTimeSeriesLog(workspace, Name="sample_temp", Time=log_time,
                         Value=log_value)
    workspace.mutableRun().addProperty("sample_magn_field", 2, True)
    _, model = create_test_model(('ws1', ), 'func1', self.parameters,
                                 [StaticWorkspaceWrapper('ws1', workspace)],
                                 self.logs)
    selected_results = [('ws1', 0)]
    table = model.create_results_table(self.log_names, selected_results)

    # workspace_name => no error col as its a string
    # sample_temp => time series and will have non-zero error
    # sample_magn_field => just a number
    expected_cols = [
        'workspace_name', 'run_start', 'run_start_seconds',
        'sample_temp', 'sample_tempError',
        'sample_magn_field', 'sample_magn_fieldError',
        'f0.Height', 'f0.HeightError',
        'f0.PeakCentre', 'f0.PeakCentreError',
        'f0.Sigma', 'f0.SigmaError',
        'f1.Height', 'f1.HeightError',
        'f1.PeakCentre', 'f1.PeakCentreError',
        'f1.Sigma', 'f1.SigmaError',
        'Cost function value'
    ]
    expected_types = (TableColumnType.NoType,
                      TableColumnType.X, TableColumnType.X,
                      TableColumnType.X, TableColumnType.XErr,
                      TableColumnType.X, TableColumnType.XErr,
                      TableColumnType.Y, TableColumnType.YErr,
                      TableColumnType.Y, TableColumnType.YErr,
                      TableColumnType.Y, TableColumnType.YErr,
                      TableColumnType.Y, TableColumnType.YErr,
                      TableColumnType.Y, TableColumnType.YErr,
                      TableColumnType.Y, TableColumnType.YErr,
                      TableColumnType.Y)
    # Averaged log values: run_start string, run_start_seconds, mean
    # sample_temp, sample_magn_field.
    run_start_str = "1970-01-01T00:00:01 to 1970-01-01T00:00:01"
    expected_content = [
        ('ws1_Parameters', run_start_str, 1, 86., 17.146, 2.0, 0.,
         self.f0_height[0], self.f0_height[1],
         self.f0_centre[0], self.f0_centre[1],
         self.f0_sigma[0], self.f0_sigma[1],
         self.f1_height[0], self.f1_height[1],
         self.f1_centre[0], self.f1_centre[1],
         self.f1_sigma[0], self.f1_sigma[1],
         self.cost_function[0])
    ]
    self._assert_table_matches_expected(zip(expected_cols, expected_types),
                                        expected_content, table,
                                        model.results_table_name())
def provide_workspace_with_proton_charge(output_name, is_event=True, num_entries=10):
    """Create a sample workspace carrying a proton_charge time-series log.

    :param output_name: name under which the workspace is stored in the ADS
    :param is_event: create an Event workspace if True, else a Histogram one
    :param num_entries: number of one-second-spaced proton_charge entries
                        (default 10 preserves the original behaviour)
    :return: the created workspace
    """
    ws_type = "Event" if is_event else "Histogram"
    # AddTimeSeriesLog forces us to store in ADS
    dummy_ws = CreateSampleWorkspace(NumBanks=1, BankPixelWidth=2,
                                     WorkspaceType=ws_type,
                                     OutputWorkspace=output_name)
    # Provide a proton charge: entries start one second after 00:10:00.
    time = DateAndTime("2010-01-01T00:10:00")
    value = 1.0
    for _ in range(num_entries):
        # DateAndTime arithmetic is in nanoseconds, so this advances by 1 s.
        time += 1000000000
        AddTimeSeriesLog(Workspace=dummy_ws, Name="proton_charge",
                         Type="double", Time=str(time), Value=value)
    return dummy_ws
def _create_event_workspace(self, run_number, prefix='', includeMonitors=True):
    """Build an event workspace named ``prefix + run_number`` with run_number
    and proton_charge logs, optionally splitting off a monitor workspace."""
    name = prefix + str(run_number)
    CreateSampleWorkspace(WorkspaceType='Event', NumBanks=1, NumMonitors=3,
                          BankPixelWidth=2, XMin=200, OutputWorkspace=name)
    if includeMonitors:
        # First three spectra become the (rebinned, histogram) monitors.
        CropWorkspace(InputWorkspace=name, StartWorkspaceIndex=0,
                      EndWorkspaceIndex=2, OutputWorkspace=name + '_monitors')
        Rebin(InputWorkspace=name + '_monitors', Params='0,200,20000',
              OutputWorkspace=name + '_monitors', PreserveEvents=False)
    # Keep only the detector spectra in the main workspace.
    CropWorkspace(InputWorkspace=name, StartWorkspaceIndex=3,
                  EndWorkspaceIndex=4, OutputWorkspace=name)
    AddSampleLog(Workspace=name, LogName='run_number', LogText=str(run_number))
    # Proton-charge time series at ten-minute intervals.
    for log_time, log_value in (("2010-01-01T00:00:00", 100),
                                ("2010-01-01T00:10:00", 100),
                                ("2010-01-01T00:20:00", 80),
                                ("2010-01-01T00:30:00", 80),
                                ("2010-01-01T00:40:00", 15),
                                ("2010-01-01T00:50:00", 100)):
        AddTimeSeriesLog(Workspace=name, Name="proton_charge",
                         Time=log_time, Value=log_value)
def runTest(self):
    """Predict and integrate nuclear and magnetic satellite peaks from a fake
    HB3A MD workspace and check them against the generated HKL lists."""
    # Na Mn Cl3
    # R -3 H (148)
    # 6.592 6.592 18.585177 90 90 120
    # UB/wavelength from /HFIR/HB3A/IPTS-25470/shared/autoreduce/HB3A_exp0769_scan0040.nxs
    ub = np.array([[1.20297e-01, 1.70416e-01, 1.43000e-04],
                   [8.16000e-04, -8.16000e-04, 5.38040e-02],
                   [1.27324e-01, -4.05110e-02, -4.81000e-04]])
    wavelength = 1.553
    # create fake MDEventWorkspace, similar to what is expected from exp769 after loading with HB3AAdjustSampleNorm
    MD_Q_sample = CreateMDWorkspace(
        Dimensions='3',
        Extents='-5,5,-5,5,-5,5',
        Names='Q_sample_x,Q_sample_y,Q_sample_z',
        Units='rlu,rlu,rlu',
        Frames='QSample,QSample,QSample')
    # Attach HB3A instrument plus zeroed goniometer logs so peak prediction
    # has an experiment info to work from.
    inst = LoadEmptyInstrument(InstrumentName='HB3A')
    AddTimeSeriesLog(inst, 'omega', '2010-01-01T00:00:00', 0.)
    AddTimeSeriesLog(inst, 'phi', '2010-01-01T00:00:00', 0.)
    AddTimeSeriesLog(inst, 'chi', '2010-01-01T00:00:00', 0.)
    MD_Q_sample.addExperimentInfo(inst)
    SetUB(MD_Q_sample, UB=ub)
    ol = OrientedLattice()
    ol.setUB(ub)
    sg = SpaceGroupFactory.createSpaceGroup("R -3")
    hkl = []       # nuclear reflections that were injected
    sat_hkl = []   # satellite reflections that were injected
    for h in range(0, 6):
        for k in range(0, 6):
            for l in range(0, 11):
                if sg.isAllowedReflection([h, k, l]):
                    if h == k == l == 0:
                        continue
                    q = V3D(h, k, l)
                    q_sample = ol.qFromHKL(q)
                    # Only inject peaks that fit inside the MD extents.
                    # NOTE(review): only the upper bound (> 5) is checked,
                    # not < -5 — confirm this is intentional for this UB.
                    if not np.any(np.array(q_sample) > 5):
                        hkl.append(q)
                        FakeMDEventData(
                            MD_Q_sample,
                            PeakParams='1000,{},{},{},0.05'.format(*q_sample))
                    # satellite peaks at 0,0,+1.5
                    q = V3D(h, k, l + 1.5)
                    q_sample = ol.qFromHKL(q)
                    if not np.any(np.array(q_sample) > 5):
                        sat_hkl.append(q)
                        FakeMDEventData(
                            MD_Q_sample,
                            PeakParams='100,{},{},{},0.02'.format(*q_sample))
                    # satellite peaks at 0,0,-1.5
                    q = V3D(h, k, l - 1.5)
                    q_sample = ol.qFromHKL(q)
                    if not np.any(np.array(q_sample) > 5):
                        sat_hkl.append(q)
                        FakeMDEventData(
                            MD_Q_sample,
                            PeakParams='100,{},{},{},0.02'.format(*q_sample))
    # Check that this fake workspace gives us the expected UB
    peaks = FindPeaksMD(MD_Q_sample, PeakDistanceThreshold=1,
                        OutputType='LeanElasticPeak')
    FindUBUsingFFT(peaks, MinD=5, MaxD=20)
    ShowPossibleCells(peaks)
    SelectCellOfType(peaks, CellType='Rhombohedral', Centering='R',
                     Apply=True)
    OptimizeLatticeForCellType(peaks, CellType='Hexagonal', Apply=True)
    found_ol = peaks.sample().getOrientedLattice()
    # Recovered lattice must match the Na Mn Cl3 cell quoted above.
    self.assertAlmostEqual(found_ol.a(), 6.592, places=2)
    self.assertAlmostEqual(found_ol.b(), 6.592, places=2)
    self.assertAlmostEqual(found_ol.c(), 18.585177, places=2)
    self.assertAlmostEqual(found_ol.alpha(), 90)
    self.assertAlmostEqual(found_ol.beta(), 90)
    self.assertAlmostEqual(found_ol.gamma(), 120)

    # nuclear peaks
    predict = HB3APredictPeaks(
        MD_Q_sample,
        Wavelength=wavelength,
        ReflectionCondition='Rhombohedrally centred, obverse',
        SatellitePeaks=True,
        IncludeIntegerHKL=True)
    predict = HB3AIntegratePeaks(MD_Q_sample, predict, 0.25)
    self.assertEqual(predict.getNumberPeaks(), 66)
    # check that the found peaks are expected
    for n in range(predict.getNumberPeaks()):
        HKL = predict.getPeak(n).getHKL()
        self.assertTrue(HKL in hkl, msg=f"Peak {n} with HKL={HKL}")

    # magnetic peaks
    satellites = HB3APredictPeaks(
        MD_Q_sample,
        Wavelength=wavelength,
        ReflectionCondition='Rhombohedrally centred, obverse',
        SatellitePeaks=True,
        ModVector1='0,0,1.5',
        MaxOrder=1,
        IncludeIntegerHKL=False)
    satellites = HB3AIntegratePeaks(MD_Q_sample, satellites, 0.1)
    self.assertEqual(satellites.getNumberPeaks(), 80)
    # check that the found peaks are expected
    for n in range(satellites.getNumberPeaks()):
        HKL = satellites.getPeak(n).getHKL()
        self.assertTrue(HKL in sat_hkl, msg=f"Peak {n} with HKL={HKL}")
def setUpClass(cls):
    # Save the current autodistribution setting so it can be restored later
    # (presumably in tearDownClass — not visible in this chunk).
    cls.g1da = config['graph1d.autodistribution']
    config['graph1d.autodistribution'] = 'On'
    # 2-spectrum histogram workspace flagged as raw counts (non-distribution).
    cls.ws2d_non_distribution = CreateWorkspace(
        DataX=[10, 20, 30, 10, 20, 30],
        DataY=[2, 3, 4, 5],
        DataE=[1, 2, 3, 4],
        NSpec=2,
        Distribution=False,
        UnitX='Wavelength',
        YUnitLabel='Counts per microAmp.hour',
        VerticalAxisUnit='DeltaE',
        VerticalAxisValues=[4, 6, 8],
        OutputWorkspace='ws2d_non_distribution')
    # Single-spectrum point-data workspace flagged as a distribution.
    cls.ws2d_distribution = CreateWorkspace(
        DataX=[10, 20, 30, 10, 20, 30],
        DataY=[2, 3, 4, 5, 6],
        DataE=[1, 2, 3, 4, 6],
        NSpec=1,
        Distribution=True,
        UnitX='Wavelength',
        YUnitLabel='Counts per microAmp.hour',
        OutputWorkspace='ws2d_distribution')
    cls.ws2d_histo = CreateWorkspace(DataX=[10, 20, 30, 10, 20, 30],
                                     DataY=[2, 3, 4, 5],
                                     DataE=[1, 2, 3, 4],
                                     NSpec=2,
                                     Distribution=True,
                                     UnitX='Wavelength',
                                     VerticalAxisUnit='DeltaE',
                                     VerticalAxisValues=[4, 6, 8],
                                     OutputWorkspace='ws2d_histo')
    cls.ws2d_point = CreateWorkspace(
        DataX=[1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4],
        DataY=[2] * 12,
        NSpec=3,
        OutputWorkspace='ws2d_point')
    cls.ws1d_point = CreateWorkspace(DataX=[1, 2],
                                     DataY=[1, 2],
                                     NSpec=1,
                                     Distribution=False,
                                     OutputWorkspace='ws1d_point')
    # Ragged workspaces: the two spectra have different X values.
    cls.ws2d_histo_rag = CreateWorkspace(
        DataX=[1, 2, 3, 4, 5, 2, 4, 6, 8, 10],
        DataY=[2] * 8,
        NSpec=2,
        VerticalAxisUnit='DeltaE',
        VerticalAxisValues=[5, 7, 9],
        OutputWorkspace='ws2d_histo_rag')
    cls.ws2d_point_rag = CreateWorkspace(DataX=[1, 2, 3, 4, 2, 4, 6, 8],
                                         DataY=[2] * 8,
                                         NSpec=2,
                                         OutputWorkspace='ws2d_point_rag')
    # 3D MD histo workspace with a 5x5x1 grid (effectively 2D).
    cls.ws_MD_2d = CreateMDHistoWorkspace(
        Dimensionality=3,
        Extents='-3,3,-10,10,-1,1',
        SignalInput=range(25),
        ErrorInput=range(25),
        NumberOfEvents=10 * np.ones(25),
        NumberOfBins='5,5,1',
        Names='Dim1,Dim2,Dim3',
        Units='MomentumTransfer,EnergyTransfer,Angstrom',
        OutputWorkspace='ws_MD_2d')
    # Same extents but a 1x5x1 grid (effectively 1D).
    cls.ws_MD_1d = CreateMDHistoWorkspace(
        Dimensionality=3,
        Extents='-3,3,-10,10,-1,1',
        SignalInput=range(5),
        ErrorInput=range(5),
        NumberOfEvents=10 * np.ones(5),
        NumberOfBins='1,5,1',
        Names='Dim1,Dim2,Dim3',
        Units='MomentumTransfer,EnergyTransfer,Angstrom',
        OutputWorkspace='ws_MD_1d')
    cls.ws2d_point_uneven = CreateWorkspace(
        DataX=[10, 20, 30],
        DataY=[1, 2, 3],
        NSpec=1,
        OutputWorkspace='ws2d_point_uneven')
    # One very high-counting spectrum among 1000 for colour-scale tests.
    cls.ws2d_high_counting_detector = CreateWorkspace(
        DataX=[1, 2, 3, 4] * 1000,
        DataY=[2] * 4 * 12 + [200] * 4 + [2] * 987 * 4,
        NSpec=1000,
        OutputWorkspace='ws2d_high_counting_detector')
    # Conjoin a second spectrum of a different length, then re-fetch from the
    # ADS because ConjoinWorkspaces replaces the stored workspace.
    wp = CreateWorkspace(DataX=[15, 25, 35, 45], DataY=[1, 2, 3, 4], NSpec=1)
    ConjoinWorkspaces(cls.ws2d_point_uneven, wp, CheckOverlapping=False)
    cls.ws2d_point_uneven = mantid.mtd['ws2d_point_uneven']
    cls.ws2d_histo_uneven = CreateWorkspace(
        DataX=[10, 20, 30, 40],
        DataY=[1, 2, 3],
        NSpec=1,
        OutputWorkspace='ws2d_histo_uneven')
    wp = CreateWorkspace(DataX=[15, 25, 35, 45, 55],
                         DataY=[1, 2, 3, 4],
                         NSpec=1)
    ConjoinWorkspaces(cls.ws2d_histo_uneven, wp, CheckOverlapping=False)
    cls.ws2d_histo_uneven = mantid.mtd['ws2d_histo_uneven']
    # Replace the spectrum axis with an uneven numeric axis.
    newYAxis = mantid.api.NumericAxis.create(3)
    newYAxis.setValue(0, 10)
    newYAxis.setValue(1, 15)
    newYAxis.setValue(2, 25)
    cls.ws2d_histo_uneven.replaceAxis(1, newYAxis)
    # Time-series log used by the sample-log plotting tests.
    AddTimeSeriesLog(cls.ws2d_histo, Name="my_log",
                     Time="2010-01-01T00:00:00", Value=100)
    AddTimeSeriesLog(cls.ws2d_histo, Name="my_log",
                     Time="2010-01-01T00:30:00", Value=15)
    AddTimeSeriesLog(cls.ws2d_histo, Name="my_log",
                     Time="2010-01-01T00:50:00", Value=100.2)