def create_test_workspace(ws_name=None, time_series_logs=None, string_value_logs=None):
    """
    Create a single-bin Workspace2D, attach the requested logs and register
    it with the AnalysisDataService.

    :param ws_name: An optional name for the workspace
    :param time_series_logs: A set of (name, (values,...))
    :param string_value_logs: A set of (name, value) pairs
    :return: The new workspace
    """
    workspace = WorkspaceFactory.create('Workspace2D', 1, 1, 1)
    log_store = workspace.run()
    for log_name, entries in (time_series_logs if time_series_logs is not None else []):
        series = FloatTimeSeriesProperty(log_name)
        for entry in entries:
            # An indexable entry supplies its own (time, value) pair; a bare
            # value gets a fixed default timestamp.
            try:
                stamp, reading = entry[0], entry[1]
            except TypeError:
                stamp, reading = "2000-05-01T12:00:00", entry
            series.addValue(stamp, reading)
        log_store.addProperty(log_name, series, replace=True)
    for log_name, text in (string_value_logs if string_value_logs is not None else []):
        log_store.addProperty(log_name, StringPropertyWithValue(log_name, text), replace=True)
    if ws_name is None:
        ws_name = 'fitting_context_model_test'
    AnalysisDataService.Instance().addOrReplace(ws_name, workspace)
    return workspace
def add_metadata(self, ws, metadata, data):
    """Adds metadata to the workspace.

    Every metadata entry is stored as a string property; start/end times and
    a duration are derived from the file's time/date fields, and each scan
    column (except anode counts) becomes a float time-series log.
    """
    run = ws.getRun()
    # Mirror every metadata entry verbatim as a string property.
    for key, entry in metadata.items():
        run.addProperty(key, str(entry), True)
    # Combine the separate time and date fields into an absolute start time.
    parsed = datetime.datetime.strptime(metadata['time'] + ' ' + metadata['date'], '%I:%M:%S %p %m/%d/%Y')
    start_time = np.datetime64(parsed)
    run.addProperty('start_time', str(start_time), True)
    # Cumulative per-point times give the wall-clock stamp of each scan point.
    elapsed = np.cumsum(data['time'], dtype=np.int64) * np.timedelta64(1, 's')
    time_array = start_time + elapsed
    run.addProperty('end_time', str(time_array[-1]), True)
    total_seconds = (time_array[-1] - time_array[0]) / np.timedelta64(1, 's')
    run.addProperty('duration', float(total_seconds), True)
    # One time-series log per scan variable, skipping anode count columns.
    for column in data.dtype.names:
        if 'anode' in column:
            continue
        series = FloatTimeSeriesProperty(column)
        for stamp, reading in zip(time_array, data[column]):
            series.addValue(stamp, reading)
        run[column] = series
def test_timestd(self):
    """Time-averaged std of an evenly spaced log matches numpy's std over
    all but the final value."""
    run = Run()
    start = DateAndTime("2008-12-18T17:58:38")
    second = 1000000000  # one second, in nanoseconds
    # === Float type ===
    temperature = FloatTimeSeriesProperty("TEMP1")
    samples = np.arange(10) * 2.
    for idx, sample in enumerate(samples):
        temperature.addValue(start + idx * second, sample)
    run.addProperty(temperature.name, temperature, True)
    # the time-averaged statistic ignores the last value
    expected = samples[:-1].std()
    self.assertEqual(run.getTimeAveragedStd("TEMP1"), expected)
def setUp(self):
    """Build the shared test workspace once, populating its run with one
    time-series log of each supported type (float, int64, string, bool)."""
    if self._test_ws is not None:
        return
    alg = run_algorithm('CreateWorkspace', DataX=[1, 2, 3, 4, 5], DataY=[1, 2, 3, 4, 5], NSpec=1, child=True)
    ws = alg.getProperty("OutputWorkspace").value
    run = ws.run()
    start_time = DateAndTime("2008-12-18T17:58:38")
    nanosec = 1000000000  # one second, in nanoseconds
    # === Float type ===
    temp1 = FloatTimeSeriesProperty("TEMP1")
    tempvalue = -0.00161
    for i in range(self._ntemp):
        temp1.addValue(start_time + i * nanosec, tempvalue)
    run.addProperty(temp1.name, temp1, True)
    # === Int type ===
    raw_frames = Int64TimeSeriesProperty("raw_frames")
    values = [17, 1436, 2942, 4448, 5955, 7461]
    # Fix: enumerate the values so each entry gets its own timestamp; the
    # original reused the stale index `i` left over from the float loop,
    # stamping every entry with the same time.
    for i, value in enumerate(values):
        raw_frames.addValue(start_time + i * nanosec, value)
    run.addProperty(raw_frames.name, raw_frames, True)
    # === String type ===
    # Fix: dropped the spurious `temp1 =` chained rebinding.
    icp_event = StringTimeSeriesProperty("icp_event")
    values = [
        'CHANGE_PERIOD 1',
        'START_COLLECTION PERIOD 1 GF 0 RF 0 GUAH 0.000000', 'BEGIN',
        'STOP_COLLECTION PERIOD 1 GF 1053 RF 1053 GUAH 0.000000 DUR 22'
    ]
    for i, value in enumerate(values):
        icp_event.addValue(start_time + i * nanosec, value)
    run.addProperty(icp_event.name, icp_event, True)
    # === Boolean type ===
    period_1 = BoolTimeSeriesProperty("period 1")
    values = [True]
    for i, value in enumerate(values):
        period_1.addValue(start_time + i * nanosec, value)
    run.addProperty(period_1.name, period_1, True)
    self.__class__._test_ws = ws
def add_logs(workspace_name, logs):
    """
    Add a list of logs to a workspace
    :param workspace_name: A workspace to contain the logs
    :param logs: A list of logs and values
    :return: The workspace reference
    """
    workspace = create_test_workspace(workspace_name)
    run = workspace.run()
    # Each log starts at the same fixed origin and ticks every 5 seconds.
    dt_format = "%Y-%m-%dT%H:%M:%S"
    origin = datetime.datetime.strptime("2019-05-30T09:00:00", dt_format)
    for name, values in logs:
        series = FloatTimeSeriesProperty(name)
        for step, value in enumerate(values):
            stamp = origin + datetime.timedelta(seconds=5 * step)
            series.addValue(stamp.strftime(dt_format), float(value))
        run.addProperty(name, series, replace=True)
    return workspace
def setUp(self):
    """Build the shared test workspace once, populating its run with one
    time-series log of each supported type (float, int64, string, bool)."""
    if self._test_ws is not None:
        return
    alg = run_algorithm('CreateWorkspace', DataX=[1, 2, 3, 4, 5], DataY=[1, 2, 3, 4, 5], NSpec=1, child=True)
    ws = alg.getProperty("OutputWorkspace").value
    run = ws.run()
    start_time = DateAndTime("2008-12-18T17:58:38")
    nanosec = 1000000000  # one second, in nanoseconds
    # === Float type ===
    temp1 = FloatTimeSeriesProperty("TEMP1")
    tempvalue = -0.00161
    for i in range(self._ntemp):
        temp1.addValue(start_time + i * nanosec, tempvalue)
    run.addProperty(temp1.name, temp1, True)
    # === Int type ===
    raw_frames = Int64TimeSeriesProperty("raw_frames")
    values = [17, 1436, 2942, 4448, 5955, 7461]
    # Fix: enumerate the values so each entry gets its own timestamp; the
    # original reused the stale index `i` left over from the float loop,
    # stamping every entry with the same time.
    for i, value in enumerate(values):
        raw_frames.addValue(start_time + i * nanosec, value)
    run.addProperty(raw_frames.name, raw_frames, True)
    # === String type ===
    # Fix: dropped the spurious `temp1 =` chained rebinding.
    icp_event = StringTimeSeriesProperty("icp_event")
    values = [
        'CHANGE_PERIOD 1',
        'START_COLLECTION PERIOD 1 GF 0 RF 0 GUAH 0.000000', 'BEGIN',
        'STOP_COLLECTION PERIOD 1 GF 1053 RF 1053 GUAH 0.000000 DUR 22'
    ]
    for i, value in enumerate(values):
        icp_event.addValue(start_time + i * nanosec, value)
    run.addProperty(icp_event.name, icp_event, True)
    # === Boolean type ===
    period_1 = BoolTimeSeriesProperty("period 1")
    values = [True]
    for i, value in enumerate(values):
        period_1.addValue(start_time + i * nanosec, value)
    run.addProperty(period_1.name, period_1, True)
    self.__class__._test_ws = ws
def add_metadata(self, ws, metadata, data):
    """Adds metadata to the workspace.

    Copies every metadata entry as a string property, derives start/end
    times and a duration from the file's time/date fields, and turns each
    non-anode scan column into a float time-series log.
    """
    logs = ws.getRun()
    # Copy the raw metadata entries one-to-one as string properties.
    for field, content in metadata.items():
        logs.addProperty(field, str(content), True)
    # Parse the clock time and date fields into an absolute start time.
    begin = np.datetime64(datetime.datetime.strptime(metadata['time'] + ' ' + metadata['date'], '%I:%M:%S %p %m/%d/%Y'))
    logs.addProperty('start_time', str(begin), True)
    # Timestamp of each scan point = start + cumulative point durations.
    stamps = begin + np.cumsum(data['time'], dtype=np.int64) * np.timedelta64(1, 's')
    logs.addProperty('end_time', str(stamps[-1]), True)
    span = (stamps[-1] - stamps[0]) / np.timedelta64(1, 's')
    logs.addProperty('duration', float(span), True)
    # Build a time-series log for every scan variable except anode counts.
    for column in data.dtype.names:
        if 'anode' in column:
            continue
        series = FloatTimeSeriesProperty(column)
        for when, reading in zip(stamps, data[column]):
            series.addValue(when, reading)
        logs[column] = series
def setUp(self):
    """Build the shared source (height) and filter logs once per class."""
    if self.__class__._source is not None:
        return
    height = FloatTimeSeriesProperty("height")
    height_points = [
        ("2007-11-30T16:17:00", 1),
        ("2007-11-30T16:17:10", 2),
        ("2007-11-30T16:17:20", 3),
        ("2007-11-30T16:17:30", 4),
        ("2007-11-30T16:17:40", 5),
    ]
    for stamp, level in height_points:
        height.addValue(stamp, level)
    status = BoolTimeSeriesProperty("filter")
    status_points = [
        ("2007-11-30T16:16:50", False),
        ("2007-11-30T16:17:25", True),
        ("2007-11-30T16:17:39", False),
    ]
    for stamp, state in status_points:
        status.addValue(stamp, state)
    self.__class__._source = height
    self.__class__._filter = status
def setUp(self):
    """Create the height source log and boolean filter log exactly once."""
    if self.__class__._source is not None:
        return
    source_log = FloatTimeSeriesProperty("height")
    for stamp, reading in zip(
            ("2007-11-30T16:17:00", "2007-11-30T16:17:10", "2007-11-30T16:17:20",
             "2007-11-30T16:17:30", "2007-11-30T16:17:40"),
            (1, 2, 3, 4, 5)):
        source_log.addValue(stamp, reading)
    filter_log = BoolTimeSeriesProperty("filter")
    for stamp, flag in zip(
            ("2007-11-30T16:16:50", "2007-11-30T16:17:25", "2007-11-30T16:17:39"),
            (False, True, False)):
        filter_log.addValue(stamp, flag)
    self.__class__._source = source_log
    self.__class__._filter = filter_log
def test_addFilter_filters_log(self):
    """A period filter that is True for a single interval should reduce
    the 10-entry height log to the one entry inside that interval."""
    height_log = FloatTimeSeriesProperty("height_log")
    height_log.addValue("2008-Jun-17 11:10:44", -0.86526)
    height_log.addValue("2008-Jun-17 11:10:45", -1.17843)
    height_log.addValue("2008-Jun-17 11:10:47", -1.27995)
    height_log.addValue("2008-Jun-17 11:20:15", -1.38216)
    height_log.addValue("2008-Jun-17 11:20:16", -1.87435)
    height_log.addValue("2008-Jun-17 11:20:17", -2.70547)
    height_log.addValue("2008-Jun-17 11:20:19", -2.99125)
    height_log.addValue("2008-Jun-17 11:20:20", -3)
    height_log.addValue("2008-Jun-17 11:20:27", -2.98519)
    height_log.addValue("2008-Jun-17 11:20:29", -2.68904)
    period_log = BoolTimeSeriesProperty("period 7")
    period_log.addValue("2008-Jun-17 11:11:13", False)
    period_log.addValue("2008-Jun-17 11:11:13", False)
    period_log.addValue("2008-Jun-17 11:11:18", False)
    period_log.addValue("2008-Jun-17 11:11:30", False)
    period_log.addValue("2008-Jun-17 11:11:42", False)
    period_log.addValue("2008-Jun-17 11:11:52", False)
    period_log.addValue("2008-Jun-17 11:12:01", False)
    period_log.addValue("2008-Jun-17 11:12:11", False)
    period_log.addValue("2008-Jun-17 11:12:21", True)
    period_log.addValue("2008-Jun-17 11:12:32", False)
    # Fix: assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(height_log.size(), 10)
    # Renamed from `filter` to avoid shadowing the builtin.
    log_filter = LogFilter(height_log)
    log_filter.addFilter(period_log)
    filtered = log_filter.data()
    self.assertEqual(filtered.size(), 1)
def test_addFilter_filters_log(self):
    """A period filter that is True for a single interval should reduce
    the 10-entry height log to the one entry inside that interval."""
    # Fix: removed stray semicolons (non-idiomatic Python).
    height_log = FloatTimeSeriesProperty("height_log")
    height_log.addValue("2008-Jun-17 11:10:44", -0.86526)
    height_log.addValue("2008-Jun-17 11:10:45", -1.17843)
    height_log.addValue("2008-Jun-17 11:10:47", -1.27995)
    height_log.addValue("2008-Jun-17 11:20:15", -1.38216)
    height_log.addValue("2008-Jun-17 11:20:16", -1.87435)
    height_log.addValue("2008-Jun-17 11:20:17", -2.70547)
    height_log.addValue("2008-Jun-17 11:20:19", -2.99125)
    height_log.addValue("2008-Jun-17 11:20:20", -3)
    height_log.addValue("2008-Jun-17 11:20:27", -2.98519)
    height_log.addValue("2008-Jun-17 11:20:29", -2.68904)
    period_log = BoolTimeSeriesProperty("period 7")
    period_log.addValue("2008-Jun-17 11:11:13", False)
    period_log.addValue("2008-Jun-17 11:11:13", False)
    period_log.addValue("2008-Jun-17 11:11:18", False)
    period_log.addValue("2008-Jun-17 11:11:30", False)
    period_log.addValue("2008-Jun-17 11:11:42", False)
    period_log.addValue("2008-Jun-17 11:11:52", False)
    period_log.addValue("2008-Jun-17 11:12:01", False)
    period_log.addValue("2008-Jun-17 11:12:11", False)
    period_log.addValue("2008-Jun-17 11:12:21", True)
    period_log.addValue("2008-Jun-17 11:12:32", False)
    # Fix: assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(height_log.size(), 10)
    # Renamed from `filter` to avoid shadowing the builtin.
    log_filter = LogFilter(height_log)
    log_filter.addFilter(period_log)
    filtered = log_filter.data()
    self.assertEqual(filtered.size(), 1)
# --- HB3A: one goniometer per scan point built from three rotation logs ---
workspace = LoadMD('HB3A_exp0724_scan0182.nxs')
SetGoniometer(workspace, Axis0='omega,0,1,0,-1', Axis1='chi,0,0,1,-1', Axis2='phi,0,1,0,-1', Average=False)
run_info = workspace.getExperimentInfo(0).run()
for i in range(run_info.getNumGoniometers()):
    print(i, run_info.getGoniometer(i).getEulerAngles('YZY'))

# --- D20: per-scan goniometers (Average=False) vs the averaged default ---
workspace = LoadILLDiffraction(Filename='ILL/D20/000017.nxs')
SetGoniometer(workspace, Axis0='omega.position,0,1,0,1', Average=False)
for i in range(workspace.run().getNumGoniometers()):
    print(f'{i} omega = {workspace.run().getGoniometer(i).getEulerAngles("YZY")[0]:.1f}')
SetGoniometer(workspace, Axis0='omega.position,0,1,0,1')
for i in range(workspace.run().getNumGoniometers()):
    print(f'{i} omega = {workspace.run().getGoniometer(i).getEulerAngles("YZY")[0]:.1f}')

# --- WAND: rebuild the s1 log as a time series so each scan point gets
# its own goniometer setting ---
workspace = LoadMD('ExternalData/Testing/Data/SystemTest/HB2C_WANDSCD_data.nxs')
angles = workspace.getExperimentInfo(0).run().getLogData('s1').value
series = FloatTimeSeriesProperty('s1')
for index, angle in enumerate(angles):
    series.addValue(index * 1e6, angle)
workspace.getExperimentInfo(0).run()['s1'] = series
workspace.getExperimentInfo(0).run().getProperty('s1').units = 'deg'
SetGoniometer(workspace, Axis0='s1,0,1,0,1', Average=False)
run_info = workspace.getExperimentInfo(0).run()
for i in range(run_info.getNumGoniometers()):
    print(f'{i} omega = {run_info.getGoniometer(i).getEulerAngles("YZY")[0]:.1f}')
def add_time_series_property(name, run, times, values):
    """Attach a float time-series log named *name*, built from the parallel
    *times*/*values* sequences, to *run*."""
    series = FloatTimeSeriesProperty(name)
    for timestamp, reading in zip(times, values):
        series.addValue(timestamp, reading)
    run[name] = series
def runTest(self):
    """Convert fake WAND scan data to an HKL-framed histogram two ways
    (ConvertWANDSCDtoQ vs ConvertHFIRSCDtoMDE + ConvertQtoHKLMDHisto)
    and check the outputs agree in units and peak-region centroids."""
    # Random signal for a 32 x 240 detector grid over 100 scan points.
    S = np.random.random(32 * 240 * 100)
    # Wrap the signal in an MDHisto workspace; ravel('F') matches the
    # y,x,scanIndex dimension ordering.
    ConvertWANDSCDtoQTest_data = CreateMDHistoWorkspace(
        Dimensionality=3,
        Extents='0.5,32.5,0.5,240.5,0.5,100.5',
        SignalInput=S.ravel('F'),
        ErrorInput=np.sqrt(S.ravel('F')),
        NumberOfBins='32,240,100',
        Names='y,x,scanIndex',
        Units='bin,bin,number')
    # Attach experiment info carrying the WAND instrument definition.
    ConvertWANDSCDtoQTest_dummy = CreateSingleValuedWorkspace()
    LoadInstrument(ConvertWANDSCDtoQTest_dummy, InstrumentName='WAND', RewriteSpectraMap=False)
    ConvertWANDSCDtoQTest_data.addExperimentInfo(ConvertWANDSCDtoQTest_dummy)
    # s1 rotation log: one entry per scan point, 0 to 49.5 in 0.5 steps.
    log = FloatTimeSeriesProperty('s1')
    for t, v in zip(range(100), np.arange(0, 50, 0.5)):
        log.addValue(t, v)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run()['s1'] = log
    # Per-point duration and monitor counts, plus per-pixel detector angles.
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'duration', [60.] * 100, True)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'monitor_count', [120000.] * 100, True)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'twotheta', list(np.linspace(np.pi * 2 / 3, 0, 240).repeat(32)), True)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'azimuthal', list(np.tile(np.linspace(-0.15, 0.15, 32), 240)), True)
    # The peaks workspace only exists to carry the UB matrix (copied below).
    peaks = CreatePeaksWorkspace(NumberOfPeaks=0, OutputType='LeanElasticPeak')
    SetUB(ConvertWANDSCDtoQTest_data, 5, 5, 7, 90, 90, 120, u=[-1, 0, 1], v=[1, 0, 1])
    SetGoniometer(ConvertWANDSCDtoQTest_data, Axis0='s1,0,1,0,1', Average=False)
    CopySample(InputWorkspace=ConvertWANDSCDtoQTest_data,
               OutputWorkspace=peaks,
               CopyName=False,
               CopyMaterial=False,
               CopyEnvironment=False,
               CopyShape=False,
               CopyLattice=True)
    # Route 1: direct conversion to an HKL-framed histogram.
    Q = ConvertWANDSCDtoQ(InputWorkspace=ConvertWANDSCDtoQTest_data,
                          UBWorkspace=peaks,
                          Wavelength=1.486,
                          Frame='HKL',
                          Uproj='1,1,0',
                          Vproj='-1,1,0',
                          BinningDim0='-6.04,6.04,151',
                          BinningDim1='-6.04,6.04,151',
                          BinningDim2='-6.04,6.04,151')
    # Route 2: convert to MDEvents, then bin into HKL with the same extents.
    data_norm = ConvertHFIRSCDtoMDE(ConvertWANDSCDtoQTest_data,
                                    Wavelength=1.486,
                                    MinValues='-6.04,-6.04,-6.04',
                                    MaxValues='6.04,6.04,6.04')
    HKL = ConvertQtoHKLMDHisto(data_norm,
                               PeaksWorkspace=peaks,
                               Uproj='1,1,0',
                               Vproj='-1,1,0',
                               Extents='-6.04,6.04,-6.04,6.04,-6.04,6.04',
                               Bins='151,151,151')
    # Dimensions of both outputs must carry the same units.
    for i in range(HKL.getNumDims()):
        print(HKL.getDimension(i).getUnits(), Q.getDimension(i).getUnits())
        np.testing.assert_equal(
            HKL.getDimension(i).getUnits(), Q.getDimension(i).getUnits())
    hkl_data = mtd["HKL"].getSignalArray()
    Q_data = mtd["Q"].getSignalArray()
    print(np.isnan(Q_data).sum())
    print(np.isclose(hkl_data, 0).sum())
    # Build coordinate grids spanning the HKL output's three dimensions.
    xaxis = mtd["HKL"].getXDimension()
    yaxis = mtd["HKL"].getYDimension()
    zaxis = mtd["HKL"].getZDimension()
    x, y, z = np.meshgrid(
        np.linspace(xaxis.getMinimum(), xaxis.getMaximum(), xaxis.getNBins()),
        np.linspace(yaxis.getMinimum(), yaxis.getMaximum(), yaxis.getNBins()),
        np.linspace(zaxis.getMinimum(), zaxis.getMaximum(), zaxis.getNBins()),
        indexing="ij",
        copy=False,
    )
    print(
        x[~np.isnan(Q_data)].mean(),
        y[~np.isnan(Q_data)].mean(),
        z[~np.isnan(Q_data)].mean(),
    )
    print(
        x[~np.isclose(hkl_data, 0)].mean(),
        y[~np.isclose(hkl_data, 0)].mean(),
        z[~np.isclose(hkl_data, 0)].mean(),
    )
    # The populated regions of both conversions should have matching
    # centroids (looser tolerance on the third axis).
    np.testing.assert_almost_equal(x[~np.isnan(Q_data)].mean(),
                                   x[~np.isclose(hkl_data, 0)].mean(),
                                   decimal=2)
    np.testing.assert_almost_equal(y[~np.isnan(Q_data)].mean(),
                                   y[~np.isclose(hkl_data, 0)].mean(),
                                   decimal=2)
    np.testing.assert_almost_equal(z[~np.isnan(Q_data)].mean(),
                                   z[~np.isclose(hkl_data, 0)].mean(),
                                   decimal=1)
def setUpClass(cls):
    """Create the shared fake WAND data workspace (two Gaussian peaks on a
    32 x 240 x 100 grid) plus a flat single-point normalisation workspace."""
    def gaussian(x, y, z, x0, y0, z0, ox, oy, oz, A):
        # 3D Gaussian centred at (x0, y0, z0) with widths (ox, oy, oz)
        # and amplitude A.
        return A * np.exp(-(x - x0)**2 / (2 * ox**2) - (y - y0)**2 /
                          (2 * oy**2) - (z - z0)**2 / (2 * oz**2))

    def peaks(i, j, k):
        # Two Gaussians: a broad one at (16, 100, 50) and a narrower,
        # weaker one at (16, 150, 50) in (y, x, scanIndex) coordinates.
        return gaussian(i, j, k, 16, 100, 50, 2, 2, 2, 20) + gaussian(
            i, j, k, 16, 150, 50, 1, 1, 1, 10)

    S = np.fromfunction(peaks, (32, 240, 100))
    # Wrap the signal in an MDHisto workspace; ravel('F') matches the
    # y,x,scanIndex dimension ordering.
    ConvertWANDSCDtoQTest_data = CreateMDHistoWorkspace(
        Dimensionality=3,
        Extents='0.5,32.5,0.5,240.5,0.5,100.5',
        SignalInput=S.ravel('F'),
        ErrorInput=np.sqrt(S.ravel('F')),
        NumberOfBins='32,240,100',
        Names='y,x,scanIndex',
        Units='bin,bin,number')
    ConvertWANDSCDtoQTest_dummy = CreateSingleValuedWorkspace()
    ConvertWANDSCDtoQTest_data.addExperimentInfo(ConvertWANDSCDtoQTest_dummy)
    # s1 rotation log: one entry per scan point, 0 to 49.5 in 0.5 steps.
    log = FloatTimeSeriesProperty('s1')
    for t, v in zip(range(100), np.arange(0, 50, 0.5)):
        log.addValue(t, v)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run()['s1'] = log
    # Per-point duration and monitor counts, plus per-pixel detector angles.
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'duration', [60.] * 100, True)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'monitor_count', [120000.] * 100, True)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'twotheta', list(np.linspace(np.pi * 2 / 3, 0, 240).repeat(32)), True)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'azimuthal', list(np.tile(np.linspace(-0.15, 0.15, 32), 240)), True)
    SetUB(ConvertWANDSCDtoQTest_data, 5, 5, 7, 90, 90, 120, u=[-1, 0, 1], v=[1, 0, 1])
    SetGoniometer(ConvertWANDSCDtoQTest_data, Axis0='s1,0,1,0,1', Average=False)
    # Create Normalisation workspace: flat unit signal over one scan point.
    S = np.ones((32, 240, 1))
    ConvertWANDSCDtoQTest_norm = CreateMDHistoWorkspace(
        Dimensionality=3,
        Extents='0.5,32.5,0.5,240.5,0.5,1.5',
        SignalInput=S,
        ErrorInput=S,
        NumberOfBins='32,240,1',
        Names='y,x,scanIndex',
        Units='bin,bin,number')
    ConvertWANDSCDtoQTest_dummy2 = CreateSingleValuedWorkspace()
    ConvertWANDSCDtoQTest_norm.addExperimentInfo(ConvertWANDSCDtoQTest_dummy2)
    ConvertWANDSCDtoQTest_norm.getExperimentInfo(0).run().addProperty(
        'monitor_count', [100000.], True)