def _create_single_test_workspace(fwhm, output_name, i):
    """Create an IRIS-like indirect-geometry workspace containing a single
    Lorentzian peak, tagged with run number ``i + 1`` and a 'sample' log.

    :param fwhm: full width at half maximum of the Lorentzian peak
    :param output_name: name the workspace is registered under in the ADS
    :param i: zero-based index; the stored run number is i + 1
    """
    peak_shape = "name=Lorentzian,Amplitude=100,PeakCentre=27500,FWHM=" + str(fwhm)

    CreateSampleWorkspace(Function='User Defined',
                          UserDefinedFunction=peak_shape,
                          XMin=27000,
                          XMax=28000,
                          BinWidth=10,
                          NumBanks=1,
                          OutputWorkspace=output_name)
    # Convert to energy transfer and rebin onto a symmetric DeltaE axis.
    ConvertUnits(InputWorkspace=output_name,
                 OutputWorkspace=output_name,
                 Target='DeltaE',
                 EMode='Indirect',
                 EFixed=1.5)
    Rebin(InputWorkspace=output_name,
          OutputWorkspace=output_name,
          Params=[-0.2, 0.004, 0.2])
    LoadInstrument(Workspace=output_name,
                   InstrumentName='IRIS',
                   RewriteSpectraMap=True)
    SetInstrumentParameter(Workspace=output_name,
                           ParameterName='Efixed',
                           DetectorList=range(1, 113),
                           ParameterType='Number',
                           Value='1.5')

    workspace = AnalysisDataService.retrieve(output_name)
    workspace.mutableRun()['run_number'] = i + 1
    workspace.mutableRun()['sample'] = [1, 2, 3]
    workspace.mutableRun()['sample'].units = " "
def test_multiple_histograms(self):
    """ComputeIncoherentDOS should average a 3-histogram S(q, w) workspace
    into a single DOS histogram; compare against self.compute per q-bin."""
    energyBins = np.arange(-7., 7., 0.13)
    qs = np.array([1.1, 1.3, 1.5, 1.7])
    EFixed = 8.
    Ys = np.ones(3 * (len(energyBins) - 1))
    Es = Ys
    verticalAxis = [str(q) for q in qs]
    ws = CreateWorkspace(energyBins, Ys, Es, NSpec=3, UnitX='DeltaE',
                         VerticalAxisUnit='MomentumTransfer',
                         VerticalAxisValues=verticalAxis, StoreInADS=False)
    LoadInstrument(ws, InstrumentName='IN4', RewriteSpectraMap=False,
                   StoreInADS=False)
    AddSampleLog(ws, LogName='Ei', LogText=str(EFixed), LogType='Number',
                 LogUnit='meV', StoreInADS=False)
    dos = ComputeIncoherentDOS(ws, EnergyBinning='Emin, Emax', StoreInADS=False)
    self.assertEqual(dos.getNumberHistograms(), 1)
    self.assertEqual(dos.getAxis(0).getUnit().unitID(), 'DeltaE')
    dos_Xs = dos.readX(0)
    self.assertEqual(len(dos_Xs), len(energyBins))
    dos_Ys = dos.readY(0)
    dos_Es = dos.readE(0)
    # Expected DOS: mean of the per-histogram results, errors in quadrature.
    g1 = self.compute(qs[0:2], energyBins)
    g2 = self.compute(qs[1:3], energyBins)
    g3 = self.compute(qs[2:4], energyBins)
    g = (g1 + g2 + g3) / 3
    gE = np.sqrt(g1**2 + g2**2 + g3**2) / 3
    np.testing.assert_equal(dos_Xs, energyBins)
    for i in range(len(dos_Ys)):
        # assertAlmostEquals was a deprecated alias, removed in Python 3.12;
        # use the canonical assertAlmostEqual.
        self.assertAlmostEqual(dos_Ys[i], g[i])
        self.assertAlmostEqual(dos_Es[i], gE[i])
def load_file_and_apply(self, filename, ws_name):
    """Load one run into *ws_name* and apply the optional corrections
    selected by the algorithm's properties (instrument override,
    calibration, DetCal, parameter copy, masking, momentum crop).

    :param filename: file to load
    :param ws_name: name the processed workspace is stored under
    """
    Load(Filename=filename, OutputWorkspace=ws_name,
         FilterByTofMin=self.getProperty("FilterByTofMin").value,
         FilterByTofMax=self.getProperty("FilterByTofMax").value)
    if self._load_inst:
        # Replace the embedded instrument definition with the user's IDF.
        LoadInstrument(Workspace=ws_name,
                       Filename=self.getProperty("LoadInstrument").value,
                       RewriteSpectraMap=False)
    if self._apply_cal:
        ApplyCalibration(
            Workspace=ws_name,
            CalibrationTable=self.getProperty("ApplyCalibration").value)
    if self._detcal:
        LoadIsawDetCal(InputWorkspace=ws_name,
                       Filename=self.getProperty("DetCal").value)
    if self._copy_params:
        CopyInstrumentParameters(OutputWorkspace=ws_name,
                                 InputWorkspace=self.getProperty(
                                     "CopyInstrumentParameters").value)
    if self._masking:
        # The mask workspace is loaded once and cached in the ADS as
        # '__mask' so repeated calls reuse it.
        if not mtd.doesExist('__mask'):
            LoadMask(Instrument=mtd[ws_name].getInstrument().getName(),
                     InputFile=self.getProperty("MaskFile").value,
                     OutputWorkspace='__mask')
        MaskDetectors(Workspace=ws_name, MaskedWorkspace='__mask')
    # Only crop when both limits were explicitly provided.
    if self.XMin != Property.EMPTY_DBL and self.XMax != Property.EMPTY_DBL:
        ConvertUnits(InputWorkspace=ws_name, OutputWorkspace=ws_name,
                     Target='Momentum')
        CropWorkspaceForMDNorm(InputWorkspace=ws_name,
                               OutputWorkspace=ws_name,
                               XMin=self.XMin,
                               XMax=self.XMax)
def test_edge_case(self):
    """Regression test for WANDPowderReduction.

    Captures the case where there are identical theta values for
    multiple spectra, which caused problems when ResampleX was run
    with particular XMin/XMax, making the output all zeros.
    """
    # Create a more realistic test workspace, using the same properties
    # as run HB2C_558131 (WAND detector: 1920 x 256 pixels, grouped 2x2).
    data = np.ones((256, 1920))
    CreateWorkspace(
        DataX=[0, 1],
        DataY=data,
        DataE=np.sqrt(data),
        UnitX='Empty',
        YUnitLabel='Counts',
        NSpec=1966080 // 4,
        OutputWorkspace='tmp_ws')
    AddSampleLog('tmp_ws',
                 LogName='HB2C:Mot:s2.RBV',
                 LogText='29.9774',
                 LogType='Number Series',
                 NumberType='Double')
    AddSampleLog('tmp_ws',
                 LogName='HB2C:Mot:detz.RBV',
                 LogText='0',
                 LogType='Number Series',
                 NumberType='Double')
    tmp_ws = mtd['tmp_ws']
    # Assign four detector IDs (a 2x2 pixel group) to every spectrum,
    # reproducing the HB2C grouping layout.
    for n in range(tmp_ws.getNumberHistograms()):
        s = tmp_ws.getSpectrum(n)
        for i in range(2):
            for j in range(2):
                s.addDetectorID(
                    int(n * 2 % 512 + n // (512 / 2) * 512 * 2 + j + i * 512))
    LoadInstrument('tmp_ws', InstrumentName='WAND', RewriteSpectraMap=False)
    out = WANDPowderReduction('tmp_ws',
                              Target='Theta',
                              XMin=29,
                              XMax=31,
                              NumberBins=10,
                              NormaliseBy='None')
    # Reference values obtained from a known-good reduction of this input.
    np.testing.assert_allclose(out.readY(0),
                               [0, 0, 0, 0, 269.068237, 486.311606,
                                618.125152, 720.37274, 811.141863,
                                821.032586],
                               rtol=5e-4)
    tmp_ws.delete()
    out.delete()
def load_instrument(raw_data_ws_name, idf_name, two_theta, cal_shift_x, cal_shift_y):
    """Attach calibration sample logs to a workspace and load its instrument.

    :param raw_data_ws_name: ADS name of the raw-data workspace
    :param idf_name: instrument definition file to load
    :param two_theta: detector two-theta angle (stored negated)
    :param cal_shift_x: calibrated detector shift along x (meter)
    :param cal_shift_y: calibrated detector shift along y (stored negated, meter)
    :return: the workspace retrieved from the ADS after setup
    """
    # (log name, log value, unit) for every calibration entry; all are
    # written as double-valued 'Number Series' logs.
    calibration_logs = [
        ('cal::2theta', '{}'.format(-two_theta), 'degree'),
        ('cal::arm', '0.0', 'meter'),
        ('cal::deltax', '{}'.format(cal_shift_x), 'meter'),
        ('cal::deltay', '{}'.format(-cal_shift_y), 'meter'),
        ('cal::flip', '0.0', 'degree'),
        ('cal::roty', '{}'.format(-two_theta), 'degree'),
        ('cal::spin', '90.0', 'degree'),
    ]
    for log_name, log_text, log_unit in calibration_logs:
        AddSampleLog(Workspace=raw_data_ws_name,
                     LogName=log_name,
                     LogText=log_text,
                     LogType='Number Series',
                     LogUnit=log_unit,
                     NumberType='Double')

    # Load instrument geometry (reads the cal:: logs written above).
    LoadInstrument(Workspace=raw_data_ws_name,
                   Filename=idf_name,
                   RewriteSpectraMap=True)

    # print output
    print_position(mtd[raw_data_ws_name])

    return mtd[raw_data_ws_name]
def createSampleWorkspace(self):
    """Create a dummy workspace that looks like a sample run."""
    lorentzian = "name=Lorentzian,Amplitude=1,PeakCentre=5,FWHM=1"
    sample = CreateSampleWorkspace("Histogram",
                                   Function="User Defined",
                                   UserDefinedFunction=lorentzian,
                                   XMin=0,
                                   XMax=10,
                                   BinWidth=0.01,
                                   XUnit="DeltaE")
    # Shift the peak to be centred on zero, then scale the axis down.
    sample = ScaleX(sample, -5, "Add")
    sample = ScaleX(sample, 0.1)
    LoadInstrument(sample, InstrumentName='IRIS', RewriteSpectraMap=True)
    return sample
def test_that_reset_to_default_groups_creates_correct_groups_and_pairs_for_single_period_data(self):
    """Resetting on single-period EMU data must recreate the default
    fwd/bwd groups and the 'long' pair, all assigned to period 1."""
    workspace = CreateSampleWorkspace()
    LoadInstrument(workspace, InstrumentName="EMU", RewriteSpectraMap=True)

    # NOTE: 'longitudanal' is spelled this way in the production API.
    self.context.reset_group_and_pairs_to_default(workspace, 'EMU', 'longitudanal', 1)

    # assertEquals was a deprecated alias, removed in Python 3.12;
    # use the canonical assertEqual.
    self.assertEqual(self.context.group_names, ['fwd', 'bwd'])
    self.assertEqual(self.context.pair_names, ['long'])
    for group in self.context.groups:
        self.assertEqual(group.periods, [1])
def unitySTwoThetaWSingleHistogram(self, energyBins, qs):
    """Build a single-histogram S(2theta, w) workspace filled with ones,
    with an IN4 instrument and an 'Ei' sample log of 8 meV.

    :param energyBins: bin edges for the DeltaE axis
    :param qs: values used to label the vertical (Degrees) axis
    :return: the workspace (not stored in the ADS)
    """
    incident_energy = 8.
    unit_counts = np.ones(len(energyBins) - 1)
    unit_errors = unit_counts
    axis_labels = [str(q) for q in qs]
    ws = CreateWorkspace(energyBins,
                         unit_counts,
                         unit_errors,
                         UnitX='DeltaE',
                         VerticalAxisUnit='Degrees',
                         VerticalAxisValues=axis_labels,
                         StoreInADS=False)
    LoadInstrument(ws, InstrumentName='IN4', RewriteSpectraMap=False,
                   StoreInADS=False)
    AddSampleLog(ws, LogName='Ei', LogText=str(incident_energy),
                 LogType='Number', LogUnit='meV', StoreInADS=False)
    return ws
def create_fake_workspace(self, name):
    """Return a fake loader result: a wrapped dummy EMU workspace plus
    a transverse main-field direction."""
    dummy_ws = CreateSampleWorkspace(StoreInADS=False)
    LoadInstrument(Workspace=dummy_ws,
                   InstrumentName='EMU',
                   RewriteSpectraMap=False,
                   StoreInADS=False)
    result = {
        'OutputWorkspace': [MuonWorkspaceWrapper(dummy_ws)],
        'MainFieldDirection': 'transverse'
    }
    return result
def load_file_and_apply(self, filename, ws_name, offset):
    """Load one run, apply optional corrections, set the goniometer
    (optionally prepending an extra rotation *offset* about the
    vertical axis), then convert to momentum and crop.

    :param filename: file to load
    :param ws_name: name the processed workspace is stored under
    :param offset: extra rotation (degrees) about 0,1,0; 0 disables it
    """
    Load(Filename=filename, OutputWorkspace=ws_name,
         FilterByTofMin=self.getProperty("FilterByTofMin").value,
         FilterByTofMax=self.getProperty("FilterByTofMax").value)
    if self._load_inst:
        # Replace the embedded instrument definition with the user's IDF.
        LoadInstrument(Workspace=ws_name,
                       Filename=self.getProperty("LoadInstrument").value,
                       RewriteSpectraMap=False)
    if self._apply_cal:
        ApplyCalibration(
            Workspace=ws_name,
            CalibrationTable=self.getProperty("ApplyCalibration").value)
    if self._detcal:
        LoadIsawDetCal(InputWorkspace=ws_name,
                       Filename=self.getProperty("DetCal").value)
    if self._copy_params:
        CopyInstrumentParameters(OutputWorkspace=ws_name,
                                 InputWorkspace=self.getProperty(
                                     "CopyInstrumentParameters").value)
    # Mask using the solid-angle workspace already present as '__sa'.
    MaskDetectors(Workspace=ws_name, MaskedWorkspace='__sa')

    if offset != 0:
        # With an offset the extra rotation becomes Axis0 and the
        # user-provided axes shift down one slot.
        if self.getProperty('SetGoniometer').value:
            SetGoniometer(
                Workspace=ws_name,
                Goniometers=self.getProperty('Goniometers').value,
                Axis0='{},0,1,0,1'.format(offset),
                Axis1=self.getProperty('Axis0').value,
                Axis2=self.getProperty('Axis1').value,
                Axis3=self.getProperty('Axis2').value)
        else:
            # Default omega/chi/phi axes from the run logs.
            SetGoniometer(Workspace=ws_name,
                          Axis0='{},0,1,0,1'.format(offset),
                          Axis1='omega,0,1,0,1',
                          Axis2='chi,0,0,1,1',
                          Axis3='phi,0,1,0,1')
    else:
        if self.getProperty('SetGoniometer').value:
            SetGoniometer(
                Workspace=ws_name,
                Goniometers=self.getProperty('Goniometers').value,
                Axis0=self.getProperty('Axis0').value,
                Axis1=self.getProperty('Axis1').value,
                Axis2=self.getProperty('Axis2').value)

    ConvertUnits(InputWorkspace=ws_name,
                 OutputWorkspace=ws_name,
                 Target='Momentum')
    CropWorkspaceForMDNorm(InputWorkspace=ws_name,
                           OutputWorkspace=ws_name,
                           XMin=self.XMin,
                           XMax=self.XMax)
def test_window_force_deleted_correctly(self):
    """force_close() must detach the ADS observer and destroy all widgets."""
    workspace = CreateSampleWorkspace()
    LoadInstrument(workspace, InstrumentName='MARI', RewriteSpectraMap=False)
    presenter = InstrumentViewPresenter(workspace)
    self.assert_widget_created()

    presenter.force_close()

    QApplication.processEvents()
    self.assertEqual(None, presenter.ads_observer)
    self.assert_widget_not_present("instr")
    self.assert_no_toplevel_widgets()
def createPhononWS(self, T, en, e_units):
    """Create an S(q, w) workspace with a phonon-like pair of Gaussians:
    a Stokes peak at +en and an anti-Stokes peak at -en whose height is
    reduced by the detailed-balance factor exp(-en * 11.6 / T).

    :param T: temperature in kelvin (11.6 K/meV converts en to kT units)
    :param en: phonon energy (peak centre) in the chosen energy unit
    :param e_units: x-axis unit for the created workspace
    :return: the workspace after SofQW3 conversion and q^2 weighting
    """
    fn = 'name=Gaussian, PeakCentre=' + str(en) + ', Height=1, Sigma=0.5;'
    fn += 'name=Gaussian, PeakCentre=-' + str(en) + ', Height=' + str(np.exp(-en * 11.6 / T)) + ', Sigma=0.5;'
    ws = CreateSampleWorkspace(binWidth=0.1, XMin=-25, XMax=25,
                               XUnit=e_units, Function='User Defined',
                               UserDefinedFunction=fn)
    LoadInstrument(ws, InstrumentName='MARI', RewriteSpectraMap=True)
    # ComputeIncoherentDOS must reject a workspace that is not S(q, w);
    # the result is irrelevant, so the previously unused 'ws_DOS'
    # assignment has been dropped.
    with self.assertRaises(RuntimeError):
        ComputeIncoherentDOS(ws)
    ws = SofQW3(ws, [0, 0.05, 8], 'Direct', 25)
    # Weight each q-bin (bin centres) by q^2.
    qq = np.arange(0, 8, 0.05) + 0.025
    for i in range(ws.getNumberHistograms()):
        ws.setY(i, ws.readY(i) * qq[i]**2)
        ws.setE(i, ws.readE(i) * qq[i]**2)
    return ws
def createSampleWorkspace(self):
    """Create a dummy IRIS-like workspace holding a single Lorentzian."""
    peak = "name=Lorentzian,Amplitude=1,PeakCentre=5,FWHM=1"
    ws = CreateSampleWorkspace(WorkspaceType="Histogram",
                               Function="User Defined",
                               UserDefinedFunction=peak,
                               XMin=0,
                               XMax=10,
                               BinWidth=0.01,
                               XUnit="DeltaE")

    # Shift to center on 0 and then scale to size
    ws = ScaleX(ws, -5, "Add")
    ws = ScaleX(ws, 0.1)
    LoadInstrument(Workspace=ws, InstrumentName='IRIS',
                   RewriteSpectraMap=True)
    return ws
def test_that_reset_to_default_groups_creates_correct_groups_and_pairs_for_multi_period_data(self):
    """Resetting on two-period EMU data must create per-period fwd/bwd
    groups, per-period long pairs, and select the first pair."""
    workspace = CreateSampleWorkspace()
    LoadInstrument(workspace, InstrumentName="EMU", RewriteSpectraMap=True)

    # NOTE: 'longitudanal' is spelled this way in the production API.
    self.context.reset_group_and_pairs_to_default(workspace, 'EMU', 'longitudanal', 2)

    # assertEquals was a deprecated alias, removed in Python 3.12;
    # use the canonical assertEqual.
    self.assertEqual(self.context.group_names, ['fwd1', 'bwd1', 'fwd2', 'bwd2'])
    self.assertEqual(self.context.pair_names, ['long1', 'long2'])
    self.assertEqual(self.context.groups[0].periods, [1])
    self.assertEqual(self.context.groups[1].periods, [1])
    self.assertEqual(self.context.groups[2].periods, [2])
    self.assertEqual(self.context.pairs[0].forward_group, 'fwd1')
    self.assertEqual(self.context.pairs[0].backward_group, 'bwd1')
    self.assertEqual(self.context.pairs[1].forward_group, 'fwd2')
    self.assertEqual(self.context.pairs[1].backward_group, 'bwd2')
    self.assertEqual(self.context.selected, 'long1')
def setUp(self):
    """Simulate SANS2D sample data plus a one-spectrum transmission run."""
    nb = 102  # number of time bins
    xd = 10   # detector width in pixels
    yd = 10   # detector height in pixels
    # Noisy counts around 5.0 for every pixel/bin.
    data = numpy.random.normal(size=xd * yd * nb) / 2.0 + 5.0
    xvalues = numpy.linspace(3500, 43500, nb + 1)
    Sample = CreateWorkspace(xvalues, data, NSpec=xd * yd)
    LoadInstrument(Sample, InstrumentName='SANS2D')
    Sample = CropWorkspace(Sample, StartWorkspaceIndex=8)  # remove the monitors

    # create a transmission workspace from a single detector spectrum
    Trans = CropWorkspace(Sample, StartWorkspaceIndex=10, EndWorkspaceIndex=10)
    # (unused locals 'tv' and 'x_v' from the original were removed)
    y_v = numpy.linspace(0.743139, 0.6, nb)
    e_v = y_v / 58.0
    Trans.setY(0, y_v)
    Trans.setE(0, e_v)

    self._sample = Sample
    self._trans = Trans
    self.n_det = xd * yd
def test_select_and_get_tab(self): """Test launch and close instrument view with ARCS data """ # create workspace ws = CreateSampleWorkspace() LoadInstrument(ws, InstrumentName='ARCS', RewriteSpectraMap=False) # No Qt widgets so far self.assert_no_toplevel_widgets() # create instrument view presenter iv_presenter = InstrumentViewPresenter(ws, parent=None, ads_observer=None) self.assert_widget_created() # select pick tab iv_presenter.select_pick_tab() current_tab_index = iv_presenter.container.widget.getCurrentTab() assert current_tab_index == 1 pick_tab = iv_presenter.get_pick_tab() assert pick_tab # render tab iv_presenter.select_render_tab() current_tab_index = iv_presenter.container.widget.getCurrentTab() assert current_tab_index == 0 render_tab = iv_presenter.get_render_tab() assert render_tab # set TOF bin range iv_presenter.set_bin_range(1000, 12000) # close iv_presenter.close(ws.name()) # process events to close all the widgets QApplication.processEvents() # asset no more widgets self.assert_no_toplevel_widgets()
def test_render_tab(self): """Test setting view and setting axis in the render tab """ # create workspace ws = CreateSampleWorkspace() LoadInstrument(ws, InstrumentName='ARCS', RewriteSpectraMap=False) # create instrument view presenter iv_presenter = InstrumentViewPresenter(ws, parent=None, ads_observer=None) self.assert_widget_created() # get render tab render_tab = iv_presenter.get_render_tab() assert render_tab # select projection render_tab.setSurfaceType(InstrumentWidget.CYLINDRICAL_X) render_tab.setSurfaceType(InstrumentWidget.FULL3D) # select axis under Full3D render_tab.setAxis('Y+') # disable autoscaling render_tab.setColorMapAutoscaling(False) # set min and max value to color bar render_tab.setMinValue(10, False) render_tab.setMaxValue(40, False) # close iv_presenter.close(ws.name()) # process events to close all the widgets QApplication.processEvents() # asset no more widgets self.assert_no_toplevel_widgets()
def _loadRunsAndReturnWorkspaceNames(self):
    '''
    Loads tof data from all files in a specified folder into workspaces.
    The instrument geometry is loaded using the IDF provided.
    :return: The list of workspace names for saving.
    '''
    files = [f for f in os.listdir(self.folder) if self.ext in f]
    self.wsNames = []
    for fname in files:  # renamed from 'file' to avoid shadowing the builtin
        infile = os.path.join(self.folder, fname)
        # Python 2 print statement converted to the function form.
        print("Loading ", infile)
        validIDs = self._loadValidIDs()
        detMap = self._loadDetectorMap()
        tofData = self._loadTofData(infile, validIDs, detMap)
        tofx, tofy = tofData
        wsName = fname.replace(self.ext, "")
        ws = CreateWorkspace(tofx, tofy, NSpec=len(validIDs),
                             OutputWorkspace=wsName)
        # NOTE(review): positional arguments — the second positional slot of
        # LoadInstrument is Filename, so this passes True as Filename and the
        # IDF path as the next parameter; verify against the algorithm
        # signature and prefer keyword arguments.
        LoadInstrument(ws, True, self.idf)
        self.wsNames.append(wsName)
    # Return the names as promised by the docstring (previously the method
    # implicitly returned None; callers using self.wsNames are unaffected).
    return self.wsNames
# NOTE(review): top-level script fragment — `data` (an HDF5-like group with
# .attrs and ["y"]/["e"] datasets), `instrument`, `time` and `t0` are defined
# earlier in the file, outside this view.
wavelength = data.attrs['wavelength']
s1 = data.attrs['s1']
s2 = data.attrs['s2']
detz = data.attrs['detz']
y = data["y"].value
e = data["e"].value
t1 = time.time()
# One spectrum per pixel, all sharing the same wavelength axis.
CreateWorkspace(OutputWorkspace='ws',
                DataX=wavelength,
                DataY=y,
                DataE=e,
                NSpec=y.size,
                UnitX='Wavelength')
# Rotate by s1 about the vertical (0,1,0) axis.
SetGoniometer('ws', Axis0="{},0,1,0,1".format(s1))
AddSampleLog('ws', LogName='HB2C:Mot:s2.RBV', LogText=str(s2),
             LogType='Number Series')
AddSampleLog('ws', LogName='HB2C:Mot:detz.RBV', LogText=str(detz),
             LogType='Number Series')
LoadInstrument('ws', InstrumentName=instrument, RewriteSpectraMap=True)
t2 = time.time()
# Timing report: workspace creation vs instrument loading.
print("t1={}".format(t1 - t0))
print("t2={}".format(t2 - t1))
def PyExec(self):
    """Preprocess a vanadium run into a solid-angle workspace and an
    integrated-flux workspace for MD normalization.

    Pipeline: load (+ optional background subtraction, instrument
    override, DetCal, masking), convert to momentum, crop, optional
    spherical absorption correction, then derive solid angle (rebinned
    histogram) and flux (grouped/summed, normalized, integrated).
    """
    _background = bool(self.getProperty("Background").value)
    _load_inst = bool(self.getProperty("LoadInstrument").value)
    _norm_current = bool(self.getProperty("NormaliseByCurrent").value)
    _detcal = bool(self.getProperty("DetCal").value)
    _masking = bool(self.getProperty("MaskFile").value)
    _grouping = bool(self.getProperty("GroupingFile").value)
    _anvred = bool(self.getProperty("SphericalAbsorptionCorrection").value)
    _SA_name = self.getPropertyValue("SolidAngleOutputWorkspace")
    _Flux_name = self.getPropertyValue("FluxOutputWorkspace")

    XMin = self.getProperty("MomentumMin").value
    XMax = self.getProperty("MomentumMax").value
    # Single rebin bin spanning [XMin, XMax] (the step equals XMax).
    rebin_param = ','.join([str(XMin), str(XMax), str(XMax)])

    Load(Filename=self.getPropertyValue("Filename"),
         OutputWorkspace='__van',
         FilterByTofMin=self.getProperty("FilterByTofMin").value,
         FilterByTofMax=self.getProperty("FilterByTofMax").value)

    if _norm_current:
        NormaliseByCurrent(InputWorkspace='__van', OutputWorkspace='__van')

    if _background:
        Load(Filename=self.getProperty("Background").value,
             OutputWorkspace='__bkg',
             FilterByTofMin=self.getProperty("FilterByTofMin").value,
             FilterByTofMax=self.getProperty("FilterByTofMax").value)
        if _norm_current:
            NormaliseByCurrent(InputWorkspace='__bkg',
                               OutputWorkspace='__bkg')
        else:
            # Without current normalization, match the background to the
            # vanadium via the proton-charge ratio.
            pc_van = mtd['__van'].run().getProtonCharge()
            pc_bkg = mtd['__bkg'].run().getProtonCharge()
            mtd['__bkg'] *= pc_van / pc_bkg
        mtd['__bkg'] *= self.getProperty('BackgroundScale').value
        Minus(LHSWorkspace='__van',
              RHSWorkspace='__bkg',
              OutputWorkspace='__van')
        DeleteWorkspace('__bkg')

    if _load_inst:
        LoadInstrument(Workspace='__van',
                       Filename=self.getProperty("LoadInstrument").value,
                       RewriteSpectraMap=False)
    if _detcal:
        LoadIsawDetCal(InputWorkspace='__van',
                       Filename=self.getProperty("DetCal").value)
    if _masking:
        LoadMask(Instrument=mtd['__van'].getInstrument().getName(),
                 InputFile=self.getProperty("MaskFile").value,
                 OutputWorkspace='__mask')
        MaskDetectors(Workspace='__van', MaskedWorkspace='__mask')
        DeleteWorkspace('__mask')

    ConvertUnits(InputWorkspace='__van', OutputWorkspace='__van',
                 Target='Momentum')
    Rebin(InputWorkspace='__van', OutputWorkspace='__van',
          Params=rebin_param)
    CropWorkspace(InputWorkspace='__van', OutputWorkspace='__van',
                  XMin=XMin, XMax=XMax)

    if _anvred:
        AnvredCorrection(InputWorkspace='__van',
                         OutputWorkspace='__van',
                         LinearScatteringCoef=self.getProperty(
                             "LinearScatteringCoef").value,
                         LinearAbsorptionCoef=self.getProperty(
                             "LinearAbsorptionCoef").value,
                         Radius=self.getProperty("Radius").value,
                         OnlySphericalAbsorption='1',
                         PowerLambda='0')

    # Create solid angle
    Rebin(InputWorkspace='__van',
          OutputWorkspace=_SA_name,
          Params=rebin_param,
          PreserveEvents=False)

    # Create flux
    if _grouping:
        GroupDetectors(InputWorkspace='__van',
                       OutputWorkspace='__van',
                       MapFile=self.getProperty("GroupingFile").value)
    else:
        SumSpectra(InputWorkspace='__van', OutputWorkspace='__van')

    Rebin(InputWorkspace='__van', OutputWorkspace='__van',
          Params=rebin_param)
    # Normalize each spectrum's events by its total counts so the
    # integrated flux per spectrum is unity.
    flux = mtd['__van']
    for i in range(flux.getNumberHistograms()):
        el = flux.getSpectrum(i)
        if flux.readY(i)[0] > 0:
            el.divide(flux.readY(i)[0], flux.readE(i)[0])
    SortEvents(InputWorkspace='__van', SortBy="X Value")
    IntegrateFlux(InputWorkspace='__van',
                  OutputWorkspace=_Flux_name,
                  NPoints=10000)
    DeleteWorkspace('__van')

    self.setProperty("SolidAngleOutputWorkspace", mtd[_SA_name])
    self.setProperty("FluxOutputWorkspace", mtd[_Flux_name])
def PyExec(self):
    """Reduce single-crystal diffuse-scattering runs to a normalized MD
    histogram in HKL, accumulating data and normalization over every
    (file, UB) combination and optionally subtracting a scaled,
    identically-processed background.
    """
    # remove possible old temp workspaces
    [
        DeleteWorkspace(ws) for ws in self.temp_workspace_list
        if mtd.doesExist(ws)
    ]
    _background = bool(self.getProperty("Background").value)
    _load_inst = bool(self.getProperty("LoadInstrument").value)
    _detcal = bool(self.getProperty("DetCal").value)
    _masking = bool(self.getProperty("MaskFile").value)
    _outWS_name = self.getPropertyValue("OutputWorkspace")

    UBList = self._generate_UBList()

    dim0_min, dim0_max, dim0_bins = self.getProperty('BinningDim0').value
    dim1_min, dim1_max, dim1_bins = self.getProperty('BinningDim1').value
    dim2_min, dim2_max, dim2_bins = self.getProperty('BinningDim2').value
    MinValues = "{},{},{}".format(dim0_min, dim1_min, dim2_min)
    MaxValues = "{},{},{}".format(dim0_max, dim1_max, dim2_max)
    # Dimension-name prefix is prepended per-MD-workspace below.
    AlignedDim0 = ",{},{},{}".format(dim0_min, dim0_max, int(dim0_bins))
    AlignedDim1 = ",{},{},{}".format(dim1_min, dim1_max, int(dim1_bins))
    AlignedDim2 = ",{},{},{}".format(dim2_min, dim2_max, int(dim2_bins))

    LoadNexus(Filename=self.getProperty("SolidAngle").value,
              OutputWorkspace='__sa')
    LoadNexus(Filename=self.getProperty("Flux").value,
              OutputWorkspace='__flux')

    if _masking:
        LoadMask(Instrument=mtd['__sa'].getInstrument().getName(),
                 InputFile=self.getProperty("MaskFile").value,
                 OutputWorkspace='__mask')
        MaskDetectors(Workspace='__sa', MaskedWorkspace='__mask')
        DeleteWorkspace('__mask')

    # Momentum crop limits come from the solid-angle workspace.
    XMin = mtd['__sa'].getXDimension().getMinimum()
    XMax = mtd['__sa'].getXDimension().getMaximum()

    if _background:
        Load(Filename=self.getProperty("Background").value,
             OutputWorkspace='__bkg',
             FilterByTofMin=self.getProperty("FilterByTofMin").value,
             FilterByTofMax=self.getProperty("FilterByTofMax").value)
        if _load_inst:
            LoadInstrument(
                Workspace='__bkg',
                Filename=self.getProperty("LoadInstrument").value,
                RewriteSpectraMap=False)
        if _detcal:
            LoadIsawDetCal(InputWorkspace='__bkg',
                           Filename=self.getProperty("DetCal").value)
        MaskDetectors(Workspace='__bkg', MaskedWorkspace='__sa')
        ConvertUnits(InputWorkspace='__bkg',
                     OutputWorkspace='__bkg',
                     Target='Momentum')
        CropWorkspace(InputWorkspace='__bkg',
                      OutputWorkspace='__bkg',
                      XMin=XMin,
                      XMax=XMax)

    progress = Progress(
        self, 0.0, 1.0,
        len(UBList) * len(self.getProperty("Filename").value))

    for run in self.getProperty("Filename").value:
        logger.notice("Working on " + run)
        Load(Filename=run,
             OutputWorkspace='__run',
             FilterByTofMin=self.getProperty("FilterByTofMin").value,
             FilterByTofMax=self.getProperty("FilterByTofMax").value)
        if _load_inst:
            LoadInstrument(
                Workspace='__run',
                Filename=self.getProperty("LoadInstrument").value,
                RewriteSpectraMap=False)
        if _detcal:
            LoadIsawDetCal(InputWorkspace='__run',
                           Filename=self.getProperty("DetCal").value)
        MaskDetectors(Workspace='__run', MaskedWorkspace='__sa')
        ConvertUnits(InputWorkspace='__run',
                     OutputWorkspace='__run',
                     Target='Momentum')
        CropWorkspace(InputWorkspace='__run',
                      OutputWorkspace='__run',
                      XMin=XMin,
                      XMax=XMax)
        if self.getProperty('SetGoniometer').value:
            SetGoniometer(
                Workspace='__run',
                Goniometers=self.getProperty('Goniometers').value,
                Axis0=self.getProperty('Axis0').value,
                Axis1=self.getProperty('Axis1').value,
                Axis2=self.getProperty('Axis2').value)

        # Set background Goniometer to be the same as data
        if _background:
            mtd['__bkg'].run().getGoniometer().setR(
                mtd['__run'].run().getGoniometer().getR())

        for ub in UBList:
            SetUB(Workspace='__run', UB=ub)
            # NOTE(review): 'wproj' is lowercase here but 'Wproj' in the
            # background branch below — works only if property lookup is
            # case-insensitive; consider normalizing.
            ConvertToMD(InputWorkspace='__run',
                        OutputWorkspace='__md',
                        QDimensions='Q3D',
                        dEAnalysisMode='Elastic',
                        Q3DFrames='HKL',
                        QConversionScales='HKL',
                        Uproj=self.getProperty('Uproj').value,
                        Vproj=self.getProperty('Vproj').value,
                        Wproj=self.getProperty('wproj').value,
                        MinValues=MinValues,
                        MaxValues=MaxValues)
            # Accumulate into '__data'/'__norm' by passing the existing
            # workspaces back in as temporaries.
            MDNormSCD(
                InputWorkspace=mtd['__md'],
                FluxWorkspace='__flux',
                SolidAngleWorkspace='__sa',
                OutputWorkspace='__data',
                SkipSafetyCheck=True,
                TemporaryDataWorkspace='__data'
                if mtd.doesExist('__data') else None,
                OutputNormalizationWorkspace='__norm',
                TemporaryNormalizationWorkspace='__norm'
                if mtd.doesExist('__norm') else None,
                AlignedDim0=mtd['__md'].getDimension(0).name + AlignedDim0,
                AlignedDim1=mtd['__md'].getDimension(1).name + AlignedDim1,
                AlignedDim2=mtd['__md'].getDimension(2).name + AlignedDim2)
            DeleteWorkspace('__md')

            if _background:
                # Background goes through the identical UB + MD pipeline,
                # accumulating into '__bkg_data'/'__bkg_norm'.
                SetUB(Workspace='__bkg', UB=ub)
                ConvertToMD(InputWorkspace='__bkg',
                            OutputWorkspace='__bkg_md',
                            QDimensions='Q3D',
                            dEAnalysisMode='Elastic',
                            Q3DFrames='HKL',
                            QConversionScales='HKL',
                            Uproj=self.getProperty('Uproj').value,
                            Vproj=self.getProperty('Vproj').value,
                            Wproj=self.getProperty('Wproj').value,
                            MinValues=MinValues,
                            MaxValues=MaxValues)
                MDNormSCD(
                    InputWorkspace='__bkg_md',
                    FluxWorkspace='__flux',
                    SolidAngleWorkspace='__sa',
                    SkipSafetyCheck=True,
                    OutputWorkspace='__bkg_data',
                    TemporaryDataWorkspace='__bkg_data'
                    if mtd.doesExist('__bkg_data') else None,
                    OutputNormalizationWorkspace='__bkg_norm',
                    TemporaryNormalizationWorkspace='__bkg_norm'
                    if mtd.doesExist('__bkg_norm') else None,
                    AlignedDim0=mtd['__bkg_md'].getDimension(0).name + AlignedDim0,
                    AlignedDim1=mtd['__bkg_md'].getDimension(1).name + AlignedDim1,
                    AlignedDim2=mtd['__bkg_md'].getDimension(2).name + AlignedDim2)
                DeleteWorkspace('__bkg_md')
            progress.report()
        DeleteWorkspace('__run')

    if _background:
        # outWS = data / norm - bkg_data / bkg_norm * BackgroundScale
        DivideMD(LHSWorkspace='__data',
                 RHSWorkspace='__norm',
                 OutputWorkspace=_outWS_name + '_normalizedData')
        DivideMD(LHSWorkspace='__bkg_data',
                 RHSWorkspace='__bkg_norm',
                 OutputWorkspace=_outWS_name + '_normalizedBackground')
        CreateSingleValuedWorkspace(
            OutputWorkspace='__scale',
            DataValue=self.getProperty('BackgroundScale').value)
        MultiplyMD(LHSWorkspace=_outWS_name + '_normalizedBackground',
                   RHSWorkspace='__scale',
                   OutputWorkspace='__scaled_background')
        DeleteWorkspace('__scale')
        MinusMD(LHSWorkspace=_outWS_name + '_normalizedData',
                RHSWorkspace='__scaled_background',
                OutputWorkspace=_outWS_name)
        if self.getProperty('KeepTemporaryWorkspaces').value:
            RenameWorkspaces(InputWorkspaces=[
                '__data', '__norm', '__bkg_data', '__bkg_norm'
            ],
                             WorkspaceNames=[
                                 _outWS_name + '_data',
                                 _outWS_name + '_normalization',
                                 _outWS_name + '_background_data',
                                 _outWS_name + '_background_normalization'
                             ])
    else:
        # outWS = data / norm
        DivideMD(LHSWorkspace='__data',
                 RHSWorkspace='__norm',
                 OutputWorkspace=_outWS_name)
        if self.getProperty('KeepTemporaryWorkspaces').value:
            RenameWorkspaces(InputWorkspaces=['__data', '__norm'],
                             WorkspaceNames=[
                                 _outWS_name + '_data',
                                 _outWS_name + '_normalization'
                             ])

    self.setProperty("OutputWorkspace", mtd[_outWS_name])

    # remove temp workspaces
    [
        DeleteWorkspace(ws) for ws in self.temp_workspace_list
        if mtd.doesExist(ws)
    ]
def runTest(self):
    """Build synthetic WAND single-crystal scan data and check that
    ConvertWANDSCDtoQ and the ConvertHFIRSCDtoMDE -> ConvertQtoHKLMDHisto
    routes agree: same dimension units and matching centres of mass of
    the populated HKL regions.
    """
    # Random signal on a 32 x 240 detector over 100 scan steps.
    S = np.random.random(32 * 240 * 100)
    ConvertWANDSCDtoQTest_data = CreateMDHistoWorkspace(
        Dimensionality=3,
        Extents='0.5,32.5,0.5,240.5,0.5,100.5',
        SignalInput=S.ravel('F'),
        ErrorInput=np.sqrt(S.ravel('F')),
        NumberOfBins='32,240,100',
        Names='y,x,scanIndex',
        Units='bin,bin,number')

    # Dummy workspace only used to carry the WAND instrument into the
    # MDHisto workspace's ExperimentInfo.
    ConvertWANDSCDtoQTest_dummy = CreateSingleValuedWorkspace()
    LoadInstrument(ConvertWANDSCDtoQTest_dummy,
                   InstrumentName='WAND',
                   RewriteSpectraMap=False)
    ConvertWANDSCDtoQTest_data.addExperimentInfo(
        ConvertWANDSCDtoQTest_dummy)

    # s1 rotation log: 0 to 49.5 degrees in 0.5-degree steps.
    log = FloatTimeSeriesProperty('s1')
    for t, v in zip(range(100), np.arange(0, 50, 0.5)):
        log.addValue(t, v)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run()['s1'] = log
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'duration', [60.] * 100, True)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'monitor_count', [120000.] * 100, True)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'twotheta',
        list(np.linspace(np.pi * 2 / 3, 0, 240).repeat(32)), True)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'azimuthal', list(np.tile(np.linspace(-0.15, 0.15, 32), 240)),
        True)

    peaks = CreatePeaksWorkspace(NumberOfPeaks=0,
                                 OutputType='LeanElasticPeak')

    SetUB(ConvertWANDSCDtoQTest_data, 5, 5, 7, 90, 90, 120,
          u=[-1, 0, 1], v=[1, 0, 1])
    SetGoniometer(ConvertWANDSCDtoQTest_data,
                  Axis0='s1,0,1,0,1',
                  Average=False)

    # Copy only the lattice onto the peaks workspace for the HKL transform.
    CopySample(InputWorkspace=ConvertWANDSCDtoQTest_data,
               OutputWorkspace=peaks,
               CopyName=False,
               CopyMaterial=False,
               CopyEnvironment=False,
               CopyShape=False,
               CopyLattice=True)

    Q = ConvertWANDSCDtoQ(InputWorkspace=ConvertWANDSCDtoQTest_data,
                          UBWorkspace=peaks,
                          Wavelength=1.486,
                          Frame='HKL',
                          Uproj='1,1,0',
                          Vproj='-1,1,0',
                          BinningDim0='-6.04,6.04,151',
                          BinningDim1='-6.04,6.04,151',
                          BinningDim2='-6.04,6.04,151')

    data_norm = ConvertHFIRSCDtoMDE(ConvertWANDSCDtoQTest_data,
                                    Wavelength=1.486,
                                    MinValues='-6.04,-6.04,-6.04',
                                    MaxValues='6.04,6.04,6.04')

    HKL = ConvertQtoHKLMDHisto(
        data_norm,
        PeaksWorkspace=peaks,
        Uproj='1,1,0',
        Vproj='-1,1,0',
        Extents='-6.04,6.04,-6.04,6.04,-6.04,6.04',
        Bins='151,151,151')

    # Both routes must produce identically-labelled dimensions.
    for i in range(HKL.getNumDims()):
        print(HKL.getDimension(i).getUnits(), Q.getDimension(i).getUnits())
        np.testing.assert_equal(
            HKL.getDimension(i).getUnits(), Q.getDimension(i).getUnits())

    hkl_data = mtd["HKL"].getSignalArray()
    Q_data = mtd["Q"].getSignalArray()
    print(np.isnan(Q_data).sum())
    print(np.isclose(hkl_data, 0).sum())

    xaxis = mtd["HKL"].getXDimension()
    yaxis = mtd["HKL"].getYDimension()
    zaxis = mtd["HKL"].getZDimension()

    x, y, z = np.meshgrid(
        np.linspace(xaxis.getMinimum(), xaxis.getMaximum(),
                    xaxis.getNBins()),
        np.linspace(yaxis.getMinimum(), yaxis.getMaximum(),
                    yaxis.getNBins()),
        np.linspace(zaxis.getMinimum(), zaxis.getMaximum(),
                    zaxis.getNBins()),
        indexing="ij",
        copy=False,
    )

    print(
        x[~np.isnan(Q_data)].mean(),
        y[~np.isnan(Q_data)].mean(),
        z[~np.isnan(Q_data)].mean(),
    )
    print(
        x[~np.isclose(hkl_data, 0)].mean(),
        y[~np.isclose(hkl_data, 0)].mean(),
        z[~np.isclose(hkl_data, 0)].mean(),
    )
    # Centres of mass of the populated regions must agree per axis
    # (Q marks empty bins as NaN, HKL as zeros).
    np.testing.assert_almost_equal(x[~np.isnan(Q_data)].mean(),
                                   x[~np.isclose(hkl_data, 0)].mean(),
                                   decimal=2)
    np.testing.assert_almost_equal(y[~np.isnan(Q_data)].mean(),
                                   y[~np.isclose(hkl_data, 0)].mean(),
                                   decimal=2)
    np.testing.assert_almost_equal(z[~np.isnan(Q_data)].mean(),
                                   z[~np.isclose(hkl_data, 0)].mean(),
                                   decimal=1)
def PyExec(self):
    """Load an EXED raw file into a Workspace2D: build the TOF histograms,
    assign detector IDs, write header values as sample logs, load the
    instrument, rotate the detector tank by -phi, and split the last two
    spectra off into a separate monitor workspace.
    """
    fn = self.getPropertyValue("Filename")
    wsn = self.getPropertyValue("OutputWorkspace")
    self.fxml = self.getPropertyValue("InstrumentXML")

    # load data
    parms_dict, det_udet, det_count, det_tbc, data = self.read_file(fn)
    nrows = int(parms_dict['NDET'])
    xdata = np.array(det_tbc)
    # Monitors get an evenly spaced x axis spanning the same TOF range.
    xdata_mon = np.linspace(xdata[0], xdata[-1], len(xdata))
    # np.float was removed from NumPy (1.24); the builtin float is the
    # documented replacement and gives the same float64 dtype.
    ydata = data.astype(float)
    ydata = ydata.reshape(nrows, -1)
    edata = np.sqrt(ydata)

    nr, nc = ydata.shape
    ws = WorkspaceFactory.create("Workspace2D", NVectors=nr,
                                 XLength=nc + 1, YLength=nc)
    for i in range(nrows):
        ws.setX(i, xdata)
        ws.setY(i, ydata[i])
        ws.setE(i, edata[i])
    ws.getAxis(0).setUnit('tof')
    AnalysisDataService.addOrReplace(wsn, ws)

    # fix the x values for the monitors (last two spectra)
    for i in range(nrows - 2, nrows):
        ws.setX(i, xdata_mon)
    self.log().information("set detector IDs")
    # set detector IDs
    for i in range(nrows):
        ws.getSpectrum(i).setDetectorID(det_udet[i])

    # Sample logs: the header values are written into the sample logs.
    # Python 3: keys/values are already text, so plain str() replaces the
    # old encode('ascii', 'ignore') / types.UnicodeType handling, which
    # would have produced bytes objects under Python 3.
    log_names = [str(sl) for sl in parms_dict.keys()]
    log_values = [str(sl) for sl in parms_dict.values()]
    AddSampleLogMultiple(Workspace=wsn,
                         LogNames=log_names,
                         LogValues=log_values)
    SetGoniometer(Workspace=wsn, Goniometers='Universal')

    if self.fxml == "":
        LoadInstrument(Workspace=wsn, InstrumentName="Exed",
                       RewriteSpectraMap=True)
    else:
        LoadInstrument(Workspace=wsn, Filename=self.fxml,
                       RewriteSpectraMap=True)
    RotateInstrumentComponent(
        Workspace=wsn,
        ComponentName='Tank',
        Y=1,
        Angle=-float(parms_dict['phi']),
        RelativeRotation=False)

    # Separate monitors into a separate workspace
    ExtractSpectra(InputWorkspace=wsn,
                   WorkspaceIndexList=','.join(
                       [str(s) for s in range(nrows - 2, nrows)]),
                   OutputWorkspace=wsn + '_Monitors')
    MaskDetectors(Workspace=wsn,
                  WorkspaceIndexList=','.join(
                      [str(s) for s in range(nrows - 2, nrows)]))
    RemoveMaskedSpectra(InputWorkspace=wsn, OutputWorkspace=wsn)
    self.setProperty("OutputWorkspace", wsn)
def PyExec(self):
    """Reduce a list of single-crystal runs into one accumulated MD workspace.

    Each file is loaded, optionally corrected (instrument XML, DetCal,
    mask, goniometer, UB matrix) and converted to MD; successive runs are
    merged into the same output via OverwriteExisting=False.
    """
    # Optional-input flags: a truthy property value enables the step.
    _load_inst = bool(self.getProperty("LoadInstrument").value)
    _detcal = bool(self.getProperty("DetCal").value)
    _masking = bool(self.getProperty("MaskFile").value)
    _outWS_name = self.getPropertyValue("OutputWorkspace")
    _UB = bool(self.getProperty("UBMatrix").value)
    # May be empty; if so they are computed from the first run below and
    # then reused for every subsequent run (kept across loop iterations).
    MinValues = self.getProperty("MinValues").value
    MaxValues = self.getProperty("MaxValues").value
    if self.getProperty("OverwriteExisting").value:
        # start accumulation from scratch
        if mtd.doesExist(_outWS_name):
            DeleteWorkspace(_outWS_name)
    progress = Progress(self, 0.0, 1.0,
                        len(self.getProperty("Filename").value))
    for run in self.getProperty("Filename").value:
        logger.notice("Working on " + run)
        Load(Filename=run,
             OutputWorkspace='__run',
             FilterByTofMin=self.getProperty("FilterByTofMin").value,
             FilterByTofMax=self.getProperty("FilterByTofMax").value,
             FilterByTimeStop=self.getProperty("FilterByTimeStop").value)
        if _load_inst:
            # replace the embedded instrument with the user-supplied XML
            LoadInstrument(
                Workspace='__run',
                Filename=self.getProperty("LoadInstrument").value,
                RewriteSpectraMap=False)
        if _detcal:
            LoadIsawDetCal(InputWorkspace='__run',
                           Filename=self.getProperty("DetCal").value)
        if _masking:
            # the mask is loaded once and cached as '__mask' for later runs
            if not mtd.doesExist('__mask'):
                LoadMask(Instrument=mtd['__run'].getInstrument().getName(),
                         InputFile=self.getProperty("MaskFile").value,
                         OutputWorkspace='__mask')
            MaskDetectors(Workspace='__run', MaskedWorkspace='__mask')
        if self.getProperty('SetGoniometer').value:
            SetGoniometer(
                Workspace='__run',
                Goniometers=self.getProperty('Goniometers').value,
                Axis0=self.getProperty('Axis0').value,
                Axis1=self.getProperty('Axis1').value,
                Axis2=self.getProperty('Axis2').value)
        if _UB:
            # with a UB matrix the conversion is done in HKL frame
            LoadIsawUB(InputWorkspace='__run',
                       Filename=self.getProperty("UBMatrix").value)
            if len(MinValues) == 0 or len(MaxValues) == 0:
                MinValues, MaxValues = ConvertToMDMinMaxGlobal(
                    '__run',
                    dEAnalysisMode='Elastic',
                    Q3DFrames='HKL',
                    QDimensions='Q3D')
            ConvertToMD(
                InputWorkspace='__run',
                OutputWorkspace=_outWS_name,
                QDimensions='Q3D',
                dEAnalysisMode='Elastic',
                Q3DFrames='HKL',
                QConversionScales='HKL',
                Uproj=self.getProperty('Uproj').value,
                Vproj=self.getProperty('Vproj').value,
                Wproj=self.getProperty('Wproj').value,
                MinValues=MinValues,
                MaxValues=MaxValues,
                SplitInto=self.getProperty('SplitInto').value,
                SplitThreshold=self.getProperty('SplitThreshold').value,
                MaxRecursionDepth=self.getProperty(
                    'MaxRecursionDepth').value,
                # False so this run is merged into the accumulated output
                OverwriteExisting=False)
        else:
            # no UB matrix: convert in Q_sample frame instead
            if len(MinValues) == 0 or len(MaxValues) == 0:
                MinValues, MaxValues = ConvertToMDMinMaxGlobal(
                    '__run',
                    dEAnalysisMode='Elastic',
                    Q3DFrames='Q',
                    QDimensions='Q3D')
            ConvertToMD(
                InputWorkspace='__run',
                OutputWorkspace=_outWS_name,
                QDimensions='Q3D',
                dEAnalysisMode='Elastic',
                Q3DFrames='Q_sample',
                Uproj=self.getProperty('Uproj').value,
                Vproj=self.getProperty('Vproj').value,
                Wproj=self.getProperty('Wproj').value,
                MinValues=MinValues,
                MaxValues=MaxValues,
                SplitInto=self.getProperty('SplitInto').value,
                SplitThreshold=self.getProperty('SplitThreshold').value,
                MaxRecursionDepth=self.getProperty(
                    'MaxRecursionDepth').value,
                OverwriteExisting=False)
        DeleteWorkspace('__run')
        progress.report()
    # clean up the cached mask, if one was created
    if mtd.doesExist('__mask'):
        DeleteWorkspace('__mask')
    self.setProperty("OutputWorkspace", mtd[_outWS_name])
def load_instrument(hb2b_builder, arm_length, two_theta=0., center_shift_x=0., center_shift_y=0.,
                    rot_x_flip=0., rot_y_flip=0., rot_z_spin=0.,
                    raw_data_ws_name=None, idf_name=None, pixel_number=None):
    """ Load instrument to raw data file
    :param hb2b_builder:
    :param arm_length: full arm length
    :param two_theta: 2theta in sample log (instrument definition). It is opposite direction to Mantid coordinate
    :param center_shift_x:
    :param center_shift_y:
    :param rot_x_flip:
    :param rot_y_flip:
    :param rot_z_spin:
    :param raw_data_ws_name:
    :param idf_name:
    :param pixel_number: linear pixel size (row number and column number)
    :return: pixel matrix
    """
    # NOTE(review): rot_z_spin is accepted and logged below but a hard-coded
    # 0. is passed to build_instrument here - confirm this is intentional.
    pixel_matrix = hb2b_builder.build_instrument(arm_length=arm_length, two_theta=-two_theta,
                                                 center_shift_x=center_shift_x,
                                                 center_shift_y=center_shift_y,
                                                 rot_x_flip=rot_x_flip, rot_y_flip=rot_y_flip,
                                                 rot_z_spin=0.)

    if True:  # using Mantid
        # check
        assert raw_data_ws_name is not None, 'data ws error'
        assert idf_name is not None, 'IDF cannot be None'
        assert pixel_number is not None, 'Pixel number to be given'

        # set up sample logs
        # cal::arm
        # FIXME - No arm length calibration
        AddSampleLog(Workspace=raw_data_ws_name, LogName='cal::arm', LogText='{}'.format(0.),
                     LogType='Number Series', LogUnit='meter', NumberType='Double')
        # cal::2theta (sign flipped to match the Mantid coordinate convention)
        AddSampleLog(Workspace=raw_data_ws_name, LogName='cal::2theta', LogText='{}'.format(-two_theta),
                     LogType='Number Series', LogUnit='degree', NumberType='Double')
        # cal::deltax
        AddSampleLog(Workspace=raw_data_ws_name, LogName='cal::deltax', LogText='{}'.format(center_shift_x),
                     LogType='Number Series', LogUnit='meter', NumberType='Double')
        # cal::deltay
        print('Shift Y = {}'.format(center_shift_y))
        AddSampleLog(Workspace=raw_data_ws_name, LogName='cal::deltay', LogText='{}'.format(center_shift_y),
                     LogType='Number Series', LogUnit='meter', NumberType='Double')
        # cal::roty
        print('Rotation Y = {}'.format(rot_y_flip))
        AddSampleLog(Workspace=raw_data_ws_name, LogName='cal::roty',
                     LogText='{}'.format(-two_theta - rot_y_flip),
                     LogType='Number Series', LogUnit='degree', NumberType='Double')
        # cal::flip
        AddSampleLog(Workspace=raw_data_ws_name, LogName='cal::flip', LogText='{}'.format(rot_x_flip),
                     LogType='Number Series', LogUnit='degree', NumberType='Double')
        # cal::spin
        print('Rotation Z = {}'.format(rot_z_spin))
        AddSampleLog(Workspace=raw_data_ws_name, LogName='cal::spin', LogText='{}'.format(rot_z_spin),
                     LogType='Number Series', LogUnit='degree', NumberType='Double')

        print('Load instrument file : {}'.format(idf_name))
        LoadInstrument(Workspace=raw_data_ws_name,
                       Filename=idf_name,
                       InstrumentName='HB2B',
                       RewriteSpectraMap='True')

        workspace = mtd[raw_data_ws_name]

        # test 5 spots (corner and center): (0, 0), (0, 1023), (1023, 0), (1023, 1023), (512, 512)
        # BUGFIX: pixel_number / 2 is a float on Python 3 and cannot be used
        # to index the pixel matrix; use integer floor division instead.
        pixel_locations = [(0, 0),
                           (0, pixel_number - 1),
                           (pixel_number - 1, 0),
                           (pixel_number - 1, pixel_number - 1),
                           (pixel_number // 2, pixel_number // 2)]
        # compare position between the PyRS builder and Mantid
        for index_i, index_j in pixel_locations:
            pos_python = pixel_matrix[index_i, index_j]
            # column-major flattening into Mantid's detector index
            index1d = index_i + pixel_number * index_j
            pos_mantid = workspace.getDetector(index1d).getPos()
            print('({}, {} / {}):   {:10s}   -   {:10s}    =   {:10s}'
                  ''.format(index_i, index_j, index1d, 'PyRS', 'Mantid', 'Diff'))
            for i in range(3):
                print('dir {}:  {:10f}   -   {:10f}    =   {:10f}'
                      ''.format(i, float(pos_python[i]), float(pos_mantid[i]),
                                float(pos_python[i] - pos_mantid[i])))
            # END-FOR
        # END-FOR
    # END-IF

    return pixel_matrix
m2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss LoadEmptyInstrument( Filename='/SNS/users/rwp/wand/IDF/test4/WAND_Definition.xml', OutputWorkspace='wand') m3 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss AddSampleLog('wand', LogName='HB2C:Mot:s2.RBV', LogText='17.57', LogType='Number Series') AddSampleLog('wand', LogName='HB2C:Mot:detz.RBV', LogText='7.05159', LogType='Number Series') m4 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss LoadInstrument('wand', Filename='/SNS/users/rwp/wand/IDF/test4/WAND_Definition.xml', RewriteSpectraMap=False) m5 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss print(m1 - m0) print(m2 - m1) print(m3 - m2) print(m4 - m3) print(m5 - m4) print(m0) print(m1) print(m2) print(m3) print(m4) print(m5)
def PyExec(self):
    """Run the POLDI data analysis chain for every sample listed in the
    input table: load, attach instrument/IPP data, remove dead wires and
    group the per-sample results.
    """
    self.log().debug('Poldi Data Analysis ---- start')

    sample_info_ws = self.getProperty("InputWorkspace").value
    load_data_at_the_end = False
    try:
        sample_ipp_ws_name = self.getProperty("OutputWorkspace").value
        if sample_ipp_ws_name == "":
            sample_ipp_ws_name = "PoldiIPPmanager"
        # BUGFIX: this line referenced the undefined name
        # 'sample_info_ws_name'; the resulting NameError was swallowed by a
        # bare except, so the IPP manager table was recreated on every run.
        self.log().debug('Poldi IPP manager ---- %s' % (sample_ipp_ws_name))
        # BUGFIX: look up the configured name instead of the hard-coded
        # default, so a custom OutputWorkspace name is honoured.
        sample_ipp_ws = mtd[sample_ipp_ws_name]
        self.log().debug('      ---- workspace ipp loaded')
    except (KeyError, RuntimeError):
        # table not in the ADS yet - create a fresh IPP manager table
        self.log().debug('      ---- workspace ipp created')
        sample_ipp_ws = WorkspaceFactory.createTable()
        sample_ipp_ws.addColumn("str", "spl Name")
        sample_ipp_ws.addColumn("str", "ipp version")
        load_data_at_the_end = True

    wlen_min = self.getProperty("wlenmin").value
    wlen_max = self.getProperty("wlenmax").value
    bad_wires_threshold = self.getProperty("BadWiresThreshold").value
    peak_detect_threshold = self.getProperty("PeakDetectionThreshold").value

    self.log().information('Poldi run with parameters')
    self.log().information('      - wlen_min : %s' % (wlen_min))
    self.log().information('      - wlen_max : %s' % (wlen_max))
    self.log().information('      - bad_wires_threshold : %s' % (bad_wires_threshold))
    self.log().information('      - peak_detect_threshold : %s' % (peak_detect_threshold))

    dictsearch = os.path.join(config['instrumentDefinition.directory'],
                              "nexusdictionaries", "poldi.dic")
    self.log().information('Poldi instr folder -  %s' % (dictsearch))

    self.log().debug('Poldi - load data')
    nb_of_sample = sample_info_ws.rowCount()
    self.log().information('Poldi -  %d samples listed' % (nb_of_sample))

    # first pass: load every sample and register its IPP version
    for sample in range(nb_of_sample):
        sampleName = sample_info_ws.column("spl Name")[sample]
        filePath = sample_info_ws.column("data file")[sample]
        sampleNameLog = sample_info_ws.column("spl log")[sample]
        sampleDeadWires = sample_info_ws.column("spl dead wires")[sample]

        self.log().information('Poldi - sample %s' % (sampleName))
        LoadSINQFile(Instrument="POLDI",
                     Filename=filePath,
                     OutputWorkspace=sampleName)
        sample_output_ws = mtd[sampleName]

        PoldiLoadLog(InputWorkspace=sample_output_ws,
                     Filename=filePath,
                     Dictionary=dictsearch,
                     PoldiLog=sampleNameLog)

        cfgService = ConfigServiceImpl.Instance()
        LoadInstrument(Workspace=sample_output_ws,
                       Filename=cfgService.getInstrumentDirectory() + "POLDI_Definition_ipp13.xml",
                       RewriteSpectraMap=True)

        self.log().debug('Poldi - set ipp')
        sample_instrument = sample_output_ws.getInstrument()
        ipp_version = sample_instrument.getStringParameter("ipp")[0]

        # record each distinct IPP version once, keyed by an example sample
        add_this_ipp = True
        for ipp in range(sample_ipp_ws.rowCount()):
            if sample_ipp_ws.column("ipp version")[ipp] == ipp_version:
                add_this_ipp = False
        if add_this_ipp:
            sample_ipp_ws.addRow([sampleName, ipp_version])

        self.log().debug('Poldi - dead wires')
        PoldiRemoveDeadWires(InputWorkspace=sample_output_ws,
                             RemoveExcludedWires=True,
                             AutoRemoveBadWires=False,
                             BadWiresThreshold=bad_wires_threshold,
                             PoldiDeadWires=sampleDeadWires)

    # second pass: load chopper/spectra/IPP data once per IPP version
    nb_of_ipp = sample_ipp_ws.rowCount()
    self.log().information('Poldi -  %d ipp listed' % (nb_of_ipp))
    for ipp in range(nb_of_ipp):
        ex_of_sample = sample_ipp_ws.column("spl Name")[ipp]
        PoldiIPP = sample_ipp_ws.column("ipp version")[ipp]
        ipp_chopper_slits = "%s_Chopper" % PoldiIPP
        ipp_Poldi_spectra = "%s_Spectra" % PoldiIPP
        ipp_ipp_data = "%s_Data" % PoldiIPP
        ex_of_sample_ws = mtd[ex_of_sample]

        self.log().debug('Poldi - chopper slits')
        PoldiLoadChopperSlits(InputWorkspace=ex_of_sample_ws,
                              PoldiChopperSlits=ipp_chopper_slits)

        self.log().debug('Poldi - spectra')
        PoldiLoadSpectra(InputWorkspace=ex_of_sample_ws,
                         PoldiSpectra=ipp_Poldi_spectra)

        self.log().debug('Poldi - IPP')
        PoldiLoadIPP(InputWorkspace=ex_of_sample_ws,
                     PoldiIPP=ipp_ipp_data)

    # final pass: bundle each sample's data, log and dead-wire workspaces
    for sample in range(nb_of_sample):
        sampleName = sample_info_ws.column("spl Name")[sample]
        sampleNameLog = sample_info_ws.column("spl log")[sample]
        sampleDeadWires = sample_info_ws.column("spl dead wires")[sample]

        groupedResults = GroupWorkspaces([mtd[sampleName].name(),
                                          sampleNameLog,
                                          sampleDeadWires])
        RenameWorkspace(InputWorkspace=groupedResults,
                        OutputWorkspace="%s_Metadata" % sampleName)

    if load_data_at_the_end:
        self.setProperty("OutputWorkspace", sample_ipp_ws)
def _create_workspace(self, ws_2D=True, sample=True, xAx=True, yAxSpec=True, yAxMt=True, instrument=True): """ create Workspace :param ws_2D: should workspace be 2D? :param sample: should workspace have sample logs? :param xAx: should x axis be DeltaE? :param yAxMt: should y axis be MomentumTransfer? :param yAxSpec: should y axis be SpectrumAxis? :param instrument: should workspace have a instrument? """ # Event Workspace if not ws_2D: ws = CreateSampleWorkspace("Event", "One Peak", XUnit="DeltaE") return ws if not xAx: ws = CreateWorkspace(DataX=self.data_x, DataY=self.data_y, DataE=np.sqrt(self.data_y), NSpec=1, UnitX="TOF") return ws if not instrument: ws = CreateWorkspace(DataX=self.data_x, DataY=self.data_y, DataE=np.sqrt(self.data_y), NSpec=1, UnitX="DeltaE") return ws if not yAxMt and not yAxSpec: ws = CreateWorkspace(DataX=self.data_x, DataY=self.data_y, DataE=np.sqrt(self.data_y), NSpec=1, UnitX="DeltaE") LoadInstrument(ws, True, InstrumentName="TOFTOF") ConvertSpectrumAxis(InputWorkspace=ws, OutputWorkspace=ws, Target="theta", EMode="Direct") return ws if not yAxSpec and yAxMt: ws = CreateWorkspace(DataX=self.data_x, DataY=self.data_y, DataE=np.sqrt(self.data_y), NSpec=1, UnitX="DeltaE") LoadInstrument(ws, True, InstrumentName="TOFTOF") self._add_all_sample_logs(ws) ConvertSpectrumAxis(InputWorkspace=ws, OutputWorkspace="ws2", Target="ElasticQ", EMode="Direct") ws2 = mtd["ws2"] return ws2 if not sample: ws = CreateWorkspace(DataX=self.data_x, DataY=self.data_y, DataE=np.sqrt(self.data_y), NSpec=1, UnitX="DeltaE") LoadInstrument(ws, False, InstrumentName="TOFTOF") for i in range(ws.getNumberHistograms()): ws.getSpectrum(i).setDetectorID(i + 1) return ws else: ws = CreateWorkspace(DataX=self.data_x, DataY=self.data_y, DataE=np.sqrt(self.data_y), NSpec=1, UnitX="DeltaE") LoadInstrument(ws, True, InstrumentName="TOFTOF") self._add_all_sample_logs(ws) return ws
def PyExec(self):
    """Load one or more HB2C (WAND) runs from their NeXus event files into
    single-bin wavelength workspaces, with optional pixel grouping and
    masking; multiple runs are returned as a WorkspaceGroup.
    """
    runs = self.getProperty("Filename").value
    if not runs:
        # build file paths from IPTS + run numbers when no filenames given
        ipts = self.getProperty("IPTS").value
        runs = [
            '/HFIR/HB2C/IPTS-{}/nexus/HB2C_{}.nxs.h5'.format(ipts, run)
            for run in self.getProperty("RunNumbers").value
        ]

    wavelength = self.getProperty("wavelength").value
    outWS = self.getPropertyValue("OutputWorkspace")
    group_names = []

    # grouping = linear pixel-binning factor (1, 2 or 4)
    grouping = self.getProperty("Grouping").value
    if grouping == 'None':
        grouping = 1
    else:
        grouping = 2 if grouping == '2x2' else 4

    for run in runs:
        # histogram the event IDs of all 8 banks into one detector array
        data = np.zeros((512 * 480 * 8), dtype=np.int64)
        with h5py.File(run, 'r') as f:
            # BUGFIX: Dataset.value was deprecated and removed in h5py 3.0;
            # use plain indexing instead.
            monitor_count = f['/entry/monitor1/total_counts'][0]
            run_number = f['/entry/run_number'][0]
            for b in range(8):
                data += np.bincount(
                    f['/entry/bank' + str(b + 1) + '_events/event_id'][()],
                    minlength=512 * 480 * 8)
        data = data.reshape((480 * 8, 512))
        # sum pixels into 2x2 or 4x4 super-pixels when grouping is requested
        if grouping == 2:
            data = data[::2, ::2] + data[1::2, ::2] + data[::2, 1::2] + data[1::2, 1::2]
        elif grouping == 4:
            data = (data[::4, ::4] + data[1::4, ::4] + data[2::4, ::4] + data[3::4, ::4]
                    + data[::4, 1::4] + data[1::4, 1::4] + data[2::4, 1::4] + data[3::4, 1::4]
                    + data[::4, 2::4] + data[1::4, 2::4] + data[2::4, 2::4] + data[3::4, 2::4]
                    + data[::4, 3::4] + data[1::4, 3::4] + data[2::4, 3::4] + data[3::4, 3::4])

        # single wavelength bin centred on the nominal wavelength
        CreateWorkspace(DataX=[wavelength - 0.001, wavelength + 0.001],
                        DataY=data,
                        DataE=np.sqrt(data),
                        UnitX='Wavelength',
                        YUnitLabel='Counts',
                        NSpec=1966080 // grouping**2,
                        OutputWorkspace='__tmp_load',
                        EnableLogging=False)
        LoadNexusLogs('__tmp_load', Filename=run, EnableLogging=False)
        AddSampleLog('__tmp_load', LogName="monitor_count", LogType='Number',
                     NumberType='Double', LogText=str(monitor_count),
                     EnableLogging=False)
        AddSampleLog('__tmp_load', LogName="gd_prtn_chrg", LogType='Number',
                     NumberType='Double', LogText=str(monitor_count),
                     EnableLogging=False)
        AddSampleLog('__tmp_load', LogName="Wavelength", LogType='Number',
                     NumberType='Double', LogText=str(wavelength),
                     EnableLogging=False)
        AddSampleLog('__tmp_load', LogName="Ei", LogType='Number',
                     NumberType='Double',
                     LogText=str(UnitConversion.run('Wavelength', 'Energy',
                                                    wavelength, 0, 0, 0,
                                                    Elastic, 0)),
                     EnableLogging=False)
        AddSampleLog('__tmp_load', LogName="run_number",
                     LogText=run_number, EnableLogging=False)

        if grouping > 1:
            # Fix detector IDs per spectrum before loading instrument
            __tmp_load = mtd['__tmp_load']
            for n in range(__tmp_load.getNumberHistograms()):
                s = __tmp_load.getSpectrum(n)
                for i in range(grouping):
                    for j in range(grouping):
                        # BUGFIX: use integer division; 512 / grouping made
                        # this float arithmetic on Python 3
                        s.addDetectorID(
                            int(n * grouping % 512
                                + n // (512 // grouping) * 512 * grouping
                                + j + i * 512))
            LoadInstrument('__tmp_load', InstrumentName='WAND',
                           RewriteSpectraMap=False, EnableLogging=False)
        else:
            LoadInstrument('__tmp_load', InstrumentName='WAND',
                           RewriteSpectraMap=True, EnableLogging=False)

        SetGoniometer('__tmp_load', Axis0="HB2C:Mot:s1,0,1,0,1",
                      EnableLogging=False)

        if self.getProperty("ApplyMask").value:
            MaskBTP('__tmp_load', Pixel='1,2,511,512', EnableLogging=False)
            # They changed pixel mapping and bank name order at run 26600
            if mtd['__tmp_load'].getRunNumber() > 26600:
                MaskBTP('__tmp_load', Bank='1', Tube='479-480',
                        EnableLogging=False)
                MaskBTP('__tmp_load', Bank='8', Tube='1-2',
                        EnableLogging=False)
            else:
                MaskBTP('__tmp_load', Bank='8', Tube='475-480',
                        EnableLogging=False)

        if len(runs) == 1:
            RenameWorkspace('__tmp_load', outWS, EnableLogging=False)
        else:
            outName = outWS + "_" + str(mtd['__tmp_load'].getRunNumber())
            group_names.append(outName)
            RenameWorkspace('__tmp_load', outName, EnableLogging=False)

    if len(runs) > 1:
        GroupWorkspaces(group_names, OutputWorkspace=outWS,
                        EnableLogging=False)

    self.setProperty('OutputWorkspace', outWS)
def PyExec(self):
    """Load an EXED raw file into a Workspace2D, attach instrument geometry
    and sample logs, and split the monitors into a separate workspace.
    """
    fn = self.getPropertyValue("Filename")
    wsn = self.getPropertyValue("OutputWorkspace")
    monitor_workspace_name = self.getPropertyValue("OutputMonitorWorkspace")
    if monitor_workspace_name == "":
        self.setPropertyValue("OutputMonitorWorkspace", wsn + '_Monitors')
    self.override_angle = self.getPropertyValue("AngleOverride")
    self.fxml = self.getPropertyValue("InstrumentXML")

    # load data
    parms_dict, det_udet, det_count, det_tbc, data = self.read_file(fn)
    nrows = int(parms_dict['NDET'])
    xdata = np.array(det_tbc)
    # monitors get an evenly spaced x axis spanning the same TOF range
    xdata_mon = np.linspace(xdata[0], xdata[-1], len(xdata))
    ydata = data.astype(float)
    ydata = ydata.reshape(nrows, -1)
    # Poisson errors
    edata = np.sqrt(ydata)
    nr, nc = ydata.shape
    ws = WorkspaceFactory.create("Workspace2D", NVectors=nr,
                                 XLength=nc + 1, YLength=nc)
    for i in range(nrows):
        ws.setX(i, xdata)
        ws.setY(i, ydata[i])
        ws.setE(i, edata[i])
    ws.getAxis(0).setUnit('tof')
    AnalysisDataService.addOrReplace(wsn, ws)

    # fix the x values for the monitors (last two spectra)
    for i in range(nrows - 2, nrows):
        ws.setX(i, xdata_mon)
    self.log().information("set detector IDs")
    # set detector IDs
    for i in range(nrows):
        ws.getSpectrum(i).setDetectorID(det_udet[i])

    # Sample_logs: the header values are written into the sample logs
    log_names = [
        str(sl.encode('ascii', 'ignore').decode())
        for sl in parms_dict.keys()
    ]
    log_values = [
        str(sl.encode('ascii', 'ignore').decode()) if isinstance(
            sl, UnicodeType) else str(sl) for sl in parms_dict.values()
    ]
    # NaN values cannot be stored as number logs; replace with a sentinel
    for i in range(len(log_values)):
        if ('nan' in log_values[i]) or ('NaN' in log_values[i]):
            log_values[i] = '-1.0'
    AddSampleLogMultiple(Workspace=wsn, LogNames=log_names,
                         LogValues=log_values)
    SetGoniometer(Workspace=wsn, Goniometers='Universal')
    if self.fxml == "":
        LoadInstrument(Workspace=wsn, InstrumentName="Exed",
                       RewriteSpectraMap=True)
    else:
        LoadInstrument(Workspace=wsn, Filename=self.fxml,
                       RewriteSpectraMap=True)
    try:
        RotateInstrumentComponent(
            Workspace=wsn,
            ComponentName='Tank',
            Y=1,
            Angle=-float(parms_dict['phi'].encode('ascii', 'ignore')),
            RelativeRotation=False)
    # BUGFIX: was a bare except, which also traps SystemExit and
    # KeyboardInterrupt; catching Exception preserves the intended
    # warn-and-continue behaviour for a missing 'Tank' component.
    except Exception:
        self.log().warning(
            "The instrument does not contain a 'Tank' component. "
            "This means that you are using a custom XML instrument definition. "
            "OMEGA_MAG will be ignored.")
        self.log().warning(
            "Please make sure that the detector positions in the instrument definition are correct."
        )

    # Separate monitors into seperate workspace
    __temp_monitors = ExtractSpectra(
        InputWorkspace=wsn,
        WorkspaceIndexList=','.join(
            [str(s) for s in range(nrows - 2, nrows)]),
        OutputWorkspace=self.getPropertyValue("OutputMonitorWorkspace"))

    MaskDetectors(Workspace=wsn,
                  WorkspaceIndexList=','.join(
                      [str(s) for s in range(nrows - 2, nrows)]))
    RemoveMaskedSpectra(InputWorkspace=wsn, OutputWorkspace=wsn)
    self.setProperty("OutputWorkspace", wsn)
    self.setProperty("OutputMonitorWorkspace", __temp_monitors)