def test_get_abs_normalization_factor(self) :
        mono_ws = CreateSampleWorkspace(NumBanks=1, BankPixelWidth=4, NumEvents=10000,XUnit='DeltaE',XMin=-5,XMax=15,BinWidth=0.1,function='Flat background')
        LoadInstrument(mono_ws,InstrumentName='MARI', RewriteSpectraMap=True)

        tReducer = DirectEnergyConversion(mono_ws.getInstrument())
        tReducer.prop_man.incident_energy = 5.
        tReducer.prop_man.monovan_integr_range=[-10,10]
        tReducer.wb_run = mono_ws

        (nf1,nf2,nf3,nf4) = tReducer.get_abs_normalization_factor(PropertyManager.wb_run,5.)
        self.assertAlmostEqual(nf1,0.58561121802167193,7)
        self.assertAlmostEqual(nf1,nf2)
        self.assertAlmostEqual(nf2,nf3)
        self.assertAlmostEqual(nf3,nf4)

        # check warning: WB spectra with 0 signal indicate trouble.
        mono_ws = CreateSampleWorkspace(NumBanks=1, BankPixelWidth=4, NumEvents=10000,XUnit='DeltaE',XMin=-5,XMax=15,BinWidth=0.1,function='Flat background')
        LoadInstrument(mono_ws,InstrumentName='MARI', RewriteSpectraMap=True)
        sig = mono_ws.dataY(0)
        sig[:]=0

        tReducer.wb_run = mono_ws
        (nf1,nf2,nf3,nf4) = tReducer.get_abs_normalization_factor(PropertyManager.wb_run,5.)
        self.assertAlmostEqual(nf1,0.585611218022,7)
        self.assertAlmostEqual(nf1,nf2)
        self.assertAlmostEqual(nf2,nf3)
        self.assertAlmostEqual(nf3,nf4)
    def __init__(self, instrumentName, web_var=None):
        """ sets properties defaults for the instrument with Name
          and define if wrapper runs from web services or not
        """
        # internal variable, indicating if we should try to wait for input files to appear
        self._wait_for_file = False
        #The property defines the run number, to validate. If defined, switches reduction wrapper from
        #reduction to validation mode
        self._run_number_to_validate = None
        # internal variable, used in system tests to validate workflow,
        # with waiting for files.  It is the holder to the function
        # used during debugging "wait for files" workflow
        # instead of Pause algorithm
        self._debug_wait_for_files_operation = None
        # tolerance to change in some tests if default is not working well
        self._tolerr = None

        # The variables which are set up from web interface or to be exported to
        # web interface
        if web_var:
            self._run_from_web = True
        else:
            self._run_from_web = False
        self._wvs = ReductionWrapper.var_holder(web_var)
        # Initialize reduced for given instrument
        self.reducer = DirectEnergyConversion(instrumentName)
        #
        web_vars = self._wvs.get_all_vars()
        if web_vars:
            self.reducer.prop_man.set_input_parameters(**web_vars)
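    # For illustration (a hedged sketch; 'reduce_vars' and the subclass name are assumptions,
    # not part of this code): when running from the web interface, web_var is expected to be
    # a module-like object exposing the standard_vars and advanced_vars dictionaries that
    # save_web_variables() writes out, e.g.
    #
    #   import reduce_vars as web_var          # generated by save_web_variables()
    #   rd = SomeReductionWrapperSubclass('MARI', web_var)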
    def test_remove_empty_bg(self):
        # create test workspace
        wksp = CreateSampleWorkspace(Function='Multiple Peaks',
                                     WorkspaceType='Event',
                                     NumBanks=3,
                                     BankPixelWidth=1,
                                     NumEvents=100,
                                     XUnit='TOF',
                                     XMin=2000,
                                     XMax=20000,
                                     BinWidth=1)
        CloneWorkspace(wksp, OutputWorkspace='bg_ws')
        AddSampleLog(Workspace=wksp,
                     LogName='gd_prtn_chrg',
                     LogText='10',
                     LogType='Number')
        AddSampleLog(Workspace='bg_ws',
                     LogName='gd_prtn_chrg',
                     LogText='100',
                     LogType='Number')

        # Prepare reducer
        tReducer = DirectEnergyConversion('MAR')
        tReducer.prop_man.sample_run = wksp
        tReducer.prop_man.empty_bg_run = 'bg_ws'

        tReducer.remove_empty_background()

        ws = PropertyManager.sample_run.get_workspace()
        self.assertTrue(ws.run().hasProperty('empty_bg_removed'))

        resWs = 0.9 * wksp
        difr = CompareWorkspaces(resWs, ws)
        self.assertTrue(difr.Result)
    def test_late_rebinning(self):
        run_monitors=CreateSampleWorkspace(Function='Multiple Peaks', NumBanks=4, BankPixelWidth=1, NumEvents=100000, XUnit='Energy',
                                                     XMin=3, XMax=200, BinWidth=0.1)
        LoadInstrument(run_monitors,InstrumentName='MARI')
        ConvertUnits(InputWorkspace='run_monitors', OutputWorkspace='run_monitors', Target='TOF')
        run_monitors = mtd['run_monitors']
        tof = run_monitors.dataX(3)
        tMin = tof[0]
        tMax = tof[-1]
        run = CreateSampleWorkspace( Function='Multiple Peaks',WorkspaceType='Event',NumBanks=8, BankPixelWidth=1, NumEvents=100000,
                                    XUnit='TOF',xMin=tMin,xMax=tMax)
        LoadInstrument(run,InstrumentName='MARI')
        wb_ws   = Rebin(run,Params=[tMin,1,tMax],PreserveEvents=False)

        # References used to test against ordinary reduction
        ref_ws = Rebin(run,Params=[tMin,1,tMax],PreserveEvents=False)
        ref_ws_monitors = CloneWorkspace('run_monitors')
        # just in case, wb should work without clone too.
        wb_clone = CloneWorkspace(wb_ws)

        # Run Mono
        tReducer = DirectEnergyConversion(run.getInstrument())
        tReducer.energy_bins =  [-20,0.2,60]
        ei_guess = 67.
        mono_s = tReducer.mono_sample(run, ei_guess,wb_ws)


        #
        mono_ref = tReducer.mono_sample(ref_ws, ei_guess,wb_clone)

        rez = CheckWorkspacesMatch(mono_s,mono_ref)
        self.assertEqual(rez,'Success!')
    def test_sum_monitors(self):
        # create test workspace
        monitor_ws=CreateSampleWorkspace(Function='Multiple Peaks', NumBanks=6, BankPixelWidth=1,\
                                            NumEvents=100000, XUnit='Energy', XMin=3, XMax=200, BinWidth=0.1)
        ConvertUnits(InputWorkspace=monitor_ws, OutputWorkspace='monitor_ws', Target='TOF')

        # Rebin to "formally" make common bin boundaries as it is not considered as such
        #any more after converting units (Is this a bug?)
        xx = monitor_ws.readX(0)
        x_min = min(xx[0],xx[-1])
        x_max= max(xx[0],xx[-1])
        x_step = (x_max-x_min)/(len(xx)-1)
        monitor_ws = Rebin(monitor_ws,Params=[x_min,x_step,x_max])
        monitor_ws = mtd['monitor_ws']
        #
        # keep this workspace for second test below -- clone and give
        # special name for RunDescriptor to recognize as monitor workspace for
        # fake data workspace we will provide.
        _TMPmonitor_ws_monitors = CloneWorkspace(monitor_ws)

        # Estimate energy from two monitors
        ei,mon1_peak,mon1_index,tzero = \
            GetEi(InputWorkspace=monitor_ws, Monitor1Spec=1,Monitor2Spec=4,
                  EnergyEstimate=62.2,FixEi=False)
        self.assertAlmostEqual(ei,62.1449,3)

        # Provide instrument parameter, necessary to define
        # DirectEnergyConversion class properly
        SetInstrumentParameter(monitor_ws,ParameterName='fix_ei',ParameterType='Number',Value='0')
        SetInstrumentParameter(monitor_ws,DetectorList=[1,2,3,6],ParameterName='DelayTime',\
                               ParameterType='Number',Value='0.5')
        SetInstrumentParameter(monitor_ws,ParameterName='mon2_norm_spec',\
                               ParameterType='Number',Value='1')

        # initiate test reducer
        tReducer = DirectEnergyConversion(monitor_ws.getInstrument())
        tReducer.prop_man.ei_mon_spectra= ([1,2,3],6)
        tReducer.prop_man.normalise_method = 'current'
        tReducer.prop_man.mon2_norm_spec = 2
        ei_mon_spectra  = tReducer.prop_man.ei_mon_spectra
        ei_mon_spectra,monitor_ws  = tReducer.sum_monitors_spectra(monitor_ws,ei_mon_spectra)
        #
        # Check GetEi with summed monitors. Try to run separately.
        ei1,mon1_peak,mon1_index,tzero = \
            GetEi(InputWorkspace=monitor_ws, Monitor1Spec=1,Monitor2Spec=6,
                  EnergyEstimate=62.2,FixEi=False)
        self.assertAlmostEqual(ei1,ei,2)

        # Second test: check get_ei as part of the reduction
        tReducer.prop_man.ei_mon_spectra= ([1,2,3],[4,5,6])
        tReducer.prop_man.fix_ei = False
        # The data workspace (monitor_ws here) is not really used; the only thing we
        # use it for is to retrieve the monitor workspace from Mantid by its name
        ei2,mon1_peak2=tReducer.get_ei(monitor_ws,62.2)
        self.assertAlmostEqual(ei2,64.95,2)

        ei2b,mon1_peak2=tReducer.get_ei(monitor_ws,62.2)
        self.assertAlmostEqual(ei2b,64.95,2)
    def test_do_white_wb(self) :
        wb_ws = CreateSampleWorkspace(NumBanks=1, BankPixelWidth=4, NumEvents=10000)
        #LoadParameterFile(Workspace=wb_ws,ParameterXML = used_parameters)
        LoadInstrument(wb_ws,InstrumentName='MARI', RewriteSpectraMap=True)

        tReducer = DirectEnergyConversion(wb_ws.getInstrument())

        white_ws = tReducer.do_white(wb_ws, None, None)
        self.assertTrue(white_ws)
    def test_diagnostics_wb(self):
        wb_ws = CreateSampleWorkspace(NumBanks=1, BankPixelWidth=4, NumEvents=10000)
        LoadInstrument(wb_ws, InstrumentName="MARI", RewriteSpectraMap=True)

        tReducer = DirectEnergyConversion(wb_ws.getInstrument())

        mask_workspace = tReducer.diagnose(wb_ws)
        self.assertTrue(mask_workspace)

        api.AnalysisDataService.clear()
def custom_operation(custom_fun):
    """Decorator wrapping a user-supplied operation over a workspace."""

    def custom_fun_wrapper(*args):
        # execute the decorated function and return the resulting workspace
        ws = custom_fun(*args)
        return ws

    return custom_fun_wrapper
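# A minimal usage sketch of the decorator above (hypothetical function and workspace
# names, not part of the original code): the wrapper simply forwards the call and
# returns whatever workspace the decorated function produces.
#
#   @custom_operation
#   def scale_workspace(ws_in):
#       return ws_in * 0.5                       # any operation returning a workspace
#
#   scaled = scale_workspace(some_loaded_ws)     # 'some_loaded_ws' assumed to exist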
    def test_tof_range(self):

        run = CreateSampleWorkspace(Function='Multiple Peaks',
                                    NumBanks=6,
                                    BankPixelWidth=1,
                                    NumEvents=10,
                                    XUnit='Energy',
                                    XMin=5,
                                    XMax=75,
                                    BinWidth=0.2)
        LoadInstrument(run, InstrumentName='MARI', RewriteSpectraMap=True)

        red = DirectEnergyConversion(run.getInstrument())

        red.prop_man.incident_energy = 26.2
        red.prop_man.energy_bins = [-20, 0.1, 20]
        red.prop_man.multirep_tof_specta_list = [4, 5, 6]
        MoveInstrumentComponent(Workspace='run',
                                ComponentName='Detector',
                                DetectorID=1102,
                                Z=3)
        MoveInstrumentComponent(Workspace='run',
                                ComponentName='Detector',
                                DetectorID=1103,
                                Z=6)

        run_tof = ConvertUnits(run, Target='TOF', EMode='Elastic')

        tof_range = red.find_tof_range_for_multirep(run_tof)

        self.assertEqual(len(tof_range), 3)

        x = run_tof.readX(3)
        xMin = min(x)
        x = run_tof.readX(5)
        xMax = max(x)

        self.assertGreater(tof_range[0], xMin)
        # self.assertAlmostEqual(tof_range[1],dt)
        self.assertLess(tof_range[2], xMax)

        # check another working mode
        red.prop_man.multirep_tof_specta_list = 4
        red.prop_man.incident_energy = 47.505
        red.prop_man.energy_bins = [-20, 0.1, 45]

        tof_range1 = red.find_tof_range_for_multirep(run_tof)

        self.assertGreater(tof_range1[0], xMin)
        self.assertLess(tof_range1[2], xMax)

        self.assertLess(tof_range1[2], tof_range[2])
        self.assertLess(tof_range1[0], tof_range[0])
        self.assertLess(tof_range1[1], tof_range[1])
    def test_tof_range(self):

        run = CreateSampleWorkspace(
            Function="Multiple Peaks",
            NumBanks=6,
            BankPixelWidth=1,
            NumEvents=10,
            XUnit="Energy",
            XMin=5,
            XMax=75,
            BinWidth=0.2,
        )
        LoadInstrument(run, InstrumentName="MARI", RewriteSpectraMap=True)

        red = DirectEnergyConversion(run.getInstrument())

        red.prop_man.incident_energy = 26.2
        red.prop_man.energy_bins = [-20, 0.1, 20]
        red.prop_man.multirep_tof_specta_list = [4, 5, 6]
        MoveInstrumentComponent(Workspace="run", ComponentName="Detector", DetectorID=1102, Z=3)
        MoveInstrumentComponent(Workspace="run", ComponentName="Detector", DetectorID=1103, Z=6)

        run_tof = ConvertUnits(run, Target="TOF", EMode="Elastic")

        tof_range = red.find_tof_range_for_multirep(run_tof)

        self.assertEqual(len(tof_range), 3)

        x = run_tof.readX(3)
        dx = abs(x[1:] - x[:-1])
        xMin = min(x)
        dt = min(dx)
        x = run_tof.readX(5)
        xMax = max(x)

        self.assertTrue(tof_range[0] > xMin)
        # self.assertAlmostEqual(tof_range[1],dt)
        self.assertTrue(tof_range[2] < xMax)

        # check another working mode
        red.prop_man.multirep_tof_specta_list = 4
        red.prop_man.incident_energy = 47.505
        red.prop_man.energy_bins = [-20, 0.1, 45]

        tof_range1 = red.find_tof_range_for_multirep(run_tof)

        self.assertTrue(tof_range1[0] > xMin)
        self.assertTrue(tof_range1[2] < xMax)

        self.assertTrue(tof_range1[2] < tof_range[2])
        self.assertTrue(tof_range1[0] < tof_range[0])
        self.assertTrue(tof_range1[1] < tof_range[1])
    def test_late_rebinning(self):
        run_monitors = CreateSampleWorkspace(Function='Multiple Peaks',
                                             NumBanks=4,
                                             BankPixelWidth=1,
                                             NumEvents=100000,
                                             XUnit='Energy',
                                             XMin=3,
                                             XMax=200,
                                             BinWidth=0.1)
        LoadInstrument(run_monitors,
                       InstrumentName='MARI',
                       RewriteSpectraMap=True)
        ConvertUnits(InputWorkspace='run_monitors',
                     OutputWorkspace='run_monitors',
                     Target='TOF')
        run_monitors = mtd['run_monitors']
        tof = run_monitors.dataX(3)
        tMin = tof[0]
        tMax = tof[-1]
        run = CreateSampleWorkspace(Function='Multiple Peaks',
                                    WorkspaceType='Event',
                                    NumBanks=8,
                                    BankPixelWidth=1,
                                    NumEvents=100000,
                                    XUnit='TOF',
                                    xMin=tMin,
                                    xMax=tMax)
        LoadInstrument(run, InstrumentName='MARI', RewriteSpectraMap=True)

        run.setMonitorWorkspace(run_monitors)

        wb_ws = Rebin(run, Params=[tMin, 1, tMax], PreserveEvents=False)

        # References used to test against ordinary reduction
        ref_ws = Rebin(run, Params=[tMin, 1, tMax], PreserveEvents=False)
        ref_ws_monitors = CloneWorkspace('run_monitors')
        ref_ws.setMonitorWorkspace(ref_ws_monitors)
        # just in case, wb should work without clone too.
        wb_clone = CloneWorkspace(wb_ws)

        # Run Mono
        tReducer = DirectEnergyConversion(run.getInstrument())
        tReducer.energy_bins = [-20, 0.2, 60]
        ei_guess = 67.
        mono_s = tReducer.mono_sample(run, ei_guess, wb_ws)

        #
        mono_ref = tReducer.mono_sample(ref_ws, ei_guess, wb_clone)

        rez = CompareWorkspaces(mono_s, mono_ref)
        self.assertTrue(rez[0])
    def test_late_rebinning(self):
        run_monitors = CreateSampleWorkspace(
            Function="Multiple Peaks",
            NumBanks=4,
            BankPixelWidth=1,
            NumEvents=100000,
            XUnit="Energy",
            XMin=3,
            XMax=200,
            BinWidth=0.1,
        )
        LoadInstrument(run_monitors, InstrumentName="MARI", RewriteSpectraMap=True)
        ConvertUnits(InputWorkspace="run_monitors", OutputWorkspace="run_monitors", Target="TOF")
        run_monitors = mtd["run_monitors"]
        tof = run_monitors.dataX(3)
        tMin = tof[0]
        tMax = tof[-1]
        run = CreateSampleWorkspace(
            Function="Multiple Peaks",
            WorkspaceType="Event",
            NumBanks=8,
            BankPixelWidth=1,
            NumEvents=100000,
            XUnit="TOF",
            xMin=tMin,
            xMax=tMax,
        )
        LoadInstrument(run, InstrumentName="MARI", RewriteSpectraMap=True)

        run.setMonitorWorkspace(run_monitors)

        wb_ws = Rebin(run, Params=[tMin, 1, tMax], PreserveEvents=False)

        # References used to test against ordinary reduction
        ref_ws = Rebin(run, Params=[tMin, 1, tMax], PreserveEvents=False)
        ref_ws_monitors = CloneWorkspace("run_monitors")
        ref_ws.setMonitorWorkspace(ref_ws_monitors)
        # just in case, wb should work without clone too.
        wb_clone = CloneWorkspace(wb_ws)

        # Run Mono
        tReducer = DirectEnergyConversion(run.getInstrument())
        tReducer.energy_bins = [-20, 0.2, 60]
        ei_guess = 67.0
        mono_s = tReducer.mono_sample(run, ei_guess, wb_ws)

        #
        mono_ref = tReducer.mono_sample(ref_ws, ei_guess, wb_clone)

        rez = CheckWorkspacesMatch(mono_s, mono_ref)
        self.assertEqual(rez, "Success!")
    def test_multirep_mode(self):
        # create test workspace
        run_monitors=CreateSampleWorkspace(Function='Multiple Peaks', NumBanks=4, BankPixelWidth=1,\
                                           NumEvents=100000,XUnit='Energy', XMin=3, XMax=200, BinWidth=0.1)
        LoadInstrument(run_monitors,InstrumentName='MARI', RewriteSpectraMap=True)
        ConvertUnits(InputWorkspace='run_monitors', OutputWorkspace='run_monitors', Target='TOF')
        run_monitors = mtd['run_monitors']
        tof = run_monitors.dataX(3)
        tMin = tof[0]
        tMax = tof[-1]
        run = CreateSampleWorkspace( Function='Multiple Peaks',WorkspaceType='Event',NumBanks=8, BankPixelWidth=1,\
                                     NumEvents=100000, XUnit='TOF',xMin=tMin,xMax=tMax)
        LoadInstrument(run,InstrumentName='MARI', RewriteSpectraMap=True)
        MoveInstrumentComponent(Workspace='run', ComponentName='Detector', DetectorID=1102,Z=1)
        # MoveInstrumentComponent(Workspace='run', ComponentName='Detector', DetectorID=1103,Z=4)
        # MoveInstrumentComponent(Workspace='run', ComponentName='Detector', DetectorID=1104,Z=5)

        # build a second run to check repeated execution
        run2 = CloneWorkspace(run)
        run2_monitors = CloneWorkspace(run_monitors)

        wb_ws   = Rebin(run,Params=[tMin,1,tMax],PreserveEvents=False)

        # Run multirep
        tReducer = DirectEnergyConversion(run.getInstrument())
        tReducer.prop_man.run_diagnostics=True
        tReducer.hard_mask_file=None
        tReducer.map_file=None
        tReducer.save_format=None
        tReducer.multirep_tof_specta_list = [4,5]

        result = tReducer.convert_to_energy(wb_ws,run,[67.,122.],[-2,0.02,0.8])

        self.assertEqual(len(result),2)

        ws1=result[0]
        self.assertEqual(ws1.getAxis(0).getUnit().unitID(),'DeltaE')
        x = ws1.readX(0)
        self.assertAlmostEqual(x[0],-2*67.)
        self.assertAlmostEqual(x[-1],0.8*67.)

        ws2=result[1]
        self.assertEqual(ws2.getAxis(0).getUnit().unitID(),'DeltaE')
        x = ws2.readX(0)
        self.assertAlmostEqual(x[0],-2*122.)
        self.assertAlmostEqual(x[-1],0.8*122.)

        # test another ws
        # rename samples from previous workspace to avoid deleting them on current run
        for ind,item in enumerate(result):
            result[ind]=RenameWorkspace(item,OutputWorkspace='SampleRez#'+str(ind))
        #
        result2 = tReducer.convert_to_energy(None,run2,[67.,122.],[-2,0.02,0.8])

        rez = CheckWorkspacesMatch(result[0],result2[0])
        self.assertEqual(rez,'Success!')
        rez = CheckWorkspacesMatch(result[1],result2[1])
        self.assertEqual(rez,'Success!')
    def __init__(self,instrumentName,web_var=None):
        """ sets properties defaults for the instrument with Name
          and define if wrapper runs from web services or not
        """
        # internal variable, indicating if we should try to wait for input files to appear
        self._wait_for_file = False
        #The property defines the run number, to validate. If defined, switches reduction wrapper from
        #reduction to validation mode
        self._run_number_to_validate=None
      # internal variable, used in system tests to validate workflow,
      # with waiting for files.  It is the holder to the function
      # used during debugging "wait for files" workflow
      # instead of Pause algorithm
        self._debug_wait_for_files_operation = None
        # tolerance to change in some tests if default is not working well
        self._tolerr=None

      # The variables which are set up from web interface or to be exported to
      # web interface
        if web_var:
            self._run_from_web = True
        else:
            self._run_from_web = False
        self._wvs = ReductionWrapper.var_holder(web_var)
      # Initialize reduced for given instrument
        self.reducer = DirectEnergyConversion(instrumentName)
        #
        web_vars = self._wvs.get_all_vars()
        if web_vars :
            self.reducer.prop_man.set_input_parameters(**web_vars)
    def test_multirep_mode(self):
        # create test workspace
        run_monitors=CreateSampleWorkspace(Function='Multiple Peaks', NumBanks=4, BankPixelWidth=1,\
                                           NumEvents=100000,XUnit='Energy', XMin=3, XMax=200, BinWidth=0.1)
        LoadInstrument(run_monitors,InstrumentName='MARI', RewriteSpectraMap=True)
        ConvertUnits(InputWorkspace='run_monitors', OutputWorkspace='run_monitors', Target='TOF')
        run_monitors = mtd['run_monitors']
        tof = run_monitors.dataX(3)
        tMin = tof[0]
        tMax = tof[-1]
        run = CreateSampleWorkspace( Function='Multiple Peaks',WorkspaceType='Event',NumBanks=8, BankPixelWidth=1,\
                                     NumEvents=100000, XUnit='TOF',xMin=tMin,xMax=tMax)
        LoadInstrument(run,InstrumentName='MARI', RewriteSpectraMap=True)
        MoveInstrumentComponent(Workspace='run', ComponentName='Detector', DetectorID=1102,Z=1)
        # MoveInstrumentComponent(Workspace='run', ComponentName='Detector', DetectorID=1103,Z=4)
        # MoveInstrumentComponent(Workspace='run', ComponentName='Detector', DetectorID=1104,Z=5)

        # build a second run to check repeated execution
        run2 = CloneWorkspace(run)
        run2_monitors = CloneWorkspace(run_monitors)

        wb_ws   = Rebin(run,Params=[tMin,1,tMax],PreserveEvents=False)

        # Run multirep
        tReducer = DirectEnergyConversion(run.getInstrument())
        tReducer.prop_man.run_diagnostics=True
        tReducer.hard_mask_file=None
        tReducer.map_file=None
        tReducer.save_format=None
        tReducer.multirep_tof_specta_list = [4,5]

        result = tReducer.convert_to_energy(wb_ws,run,[67.,122.],[-2,0.02,0.8])

        self.assertEqual(len(result),2)

        ws1=result[0]
        self.assertEqual(ws1.getAxis(0).getUnit().unitID(),'DeltaE')
        x = ws1.readX(0)
        self.assertAlmostEqual(x[0],-2*67.)
        self.assertAlmostEqual(x[-1],0.8*67.)

        ws2=result[1]
        self.assertEqual(ws2.getAxis(0).getUnit().unitID(),'DeltaE')
        x = ws2.readX(0)
        self.assertAlmostEqual(x[0],-2*122.)
        self.assertAlmostEqual(x[-1],0.8*122.)

        # test another ws
        # rename samples from previous workspace to avoid deleting them on current run
        for ind,item in enumerate(result):
            result[ind]=RenameWorkspace(item,OutputWorkspace='SampleRez#'+str(ind))
        #
        result2 = tReducer.convert_to_energy(None,run2,[67.,122.],[-2,0.02,0.8])

        rez = CompareWorkspaces(result[0],result2[0])
        self.assertTrue(rez[0])
        rez = CompareWorkspaces(result[1],result2[1])
        self.assertTrue(rez[0])
    def test_energy_to_TOF_range(self):

        ws = Load(Filename='MAR11001.raw',LoadMonitors='Include')

        en_range = [0.8*13,13,1.2*13]
        detIDs=[1,2,3,10]
        red = DirectEnergyConversion()
        TRange = red.get_TOF_for_energies(ws,en_range,detIDs)
        for ind,detID in enumerate(detIDs):
            tof = TRange[ind]
            y = [1]*(len(tof)-1)
            ind = ws.getIndexFromSpectrumNumber(detID)
            ExtractSingleSpectrum(InputWorkspace=ws, OutputWorkspace='_ws_template', WorkspaceIndex=ind)
            CreateWorkspace(OutputWorkspace='TOF_WS',NSpec = 1,DataX=tof,DataY=y,UnitX='TOF',ParentWorkspace='_ws_template')
            EnWs=ConvertUnits(InputWorkspace='TOF_WS',Target='Energy',EMode='Elastic')

            eni = EnWs.dataX(0)
            for samp,rez in zip(eni,en_range): self.assertAlmostEqual(samp,rez)

        # Now Test shifted:
        ei,mon1_peak,mon1_index,tzero = GetEi(InputWorkspace=ws, Monitor1Spec=int(2), Monitor2Spec=int(3),EnergyEstimate=13)
        ScaleX(InputWorkspace='ws',OutputWorkspace='ws',Operation="Add",Factor=-mon1_peak,InstrumentParameter="DelayTime",Combine=True)
        ws = mtd['ws']

        mon1_det = ws.getDetector(1)
        mon1_pos = mon1_det.getPos()
        src_name = ws.getInstrument().getSource().getName()
        MoveInstrumentComponent(Workspace='ws',ComponentName= src_name, X=mon1_pos.getX(), Y=mon1_pos.getY(), Z=mon1_pos.getZ(), RelativePosition=False)

        # Does not work for monitor 2, as it has been moved to the mon2 position where all tof = 0
        detIDs=[1,3,10]
        TRange1 = red.get_TOF_for_energies(ws,en_range,detIDs)

        for ind,detID in enumerate(detIDs):
            tof = TRange1[ind]
            y = [1]*(len(tof)-1)
            ind = ws.getIndexFromSpectrumNumber(detID)
            ExtractSingleSpectrum(InputWorkspace=ws, OutputWorkspace='_ws_template', WorkspaceIndex=ind)
            CreateWorkspace(OutputWorkspace='TOF_WS',NSpec = 1,DataX=tof,DataY=y,UnitX='TOF',ParentWorkspace='_ws_template')
            EnWs=ConvertUnits(InputWorkspace='TOF_WS',Target='Energy',EMode='Elastic')

            eni = EnWs.dataX(0)
            for samp,rez in zip(eni,en_range): self.assertAlmostEqual(samp,rez)
    def __init__(self,instrumentName,web_var=None):
      """ sets properties defaults for the instrument with Name 
          and define if wrapper runs from web services or not
      """
      # internal variable, indicating if we should try to wait for input files to appear
      self._wait_for_file=False

      # The variables which are set up from web interface or to be exported to 
      # web interface
      if web_var: 
        self._run_from_web = True
        self._wvs = web_var
      else:
        self._run_from_web = False
        self._wvs = ReductionWrapper.var_holder()
      # Initialize reduced for given instrument
      self.reducer = DirectEnergyConversion(instrumentName)

      self._validation_fname=None
    def setUp(self):
        if self.reducer is None or not isinstance(self.reducer, DirectEnergyConversion):
            self.reducer = DirectEnergyConversion("MAR")
    def test_multirep_abs_units_mode(self):
        # create test workspace
        run_monitors = CreateSampleWorkspace(
            Function="Multiple Peaks",
            NumBanks=4,
            BankPixelWidth=1,
            NumEvents=100000,
            XUnit="Energy",
            XMin=3,
            XMax=200,
            BinWidth=0.1,
        )
        LoadInstrument(run_monitors, InstrumentName="MARI", RewriteSpectraMap=True)
        ConvertUnits(InputWorkspace="run_monitors", OutputWorkspace="run_monitors", Target="TOF")
        run_monitors = mtd["run_monitors"]
        tof = run_monitors.dataX(3)
        tMin = tof[0]
        tMax = tof[-1]
        run = CreateSampleWorkspace(
            Function="Multiple Peaks",
            WorkspaceType="Event",
            NumBanks=8,
            BankPixelWidth=1,
            NumEvents=100000,
            XUnit="TOF",
            xMin=tMin,
            xMax=tMax,
        )
        LoadInstrument(run, InstrumentName="MARI", RewriteSpectraMap=True)

        # build "monovanadium"
        mono = CloneWorkspace(run)
        mono_monitors = CloneWorkspace(run_monitors)

        # build "White-beam"
        wb_ws = Rebin(run, Params=[tMin, 1, tMax], PreserveEvents=False)

        # build "second run" to ensure repeated execution
        run2 = CloneWorkspace(run)
        run2_monitors = CloneWorkspace(run_monitors)

        # Run multirep
        tReducer = DirectEnergyConversion(run.getInstrument())
        tReducer.prop_man.run_diagnostics = True
        tReducer.hard_mask_file = None
        tReducer.map_file = None
        tReducer.prop_man.background_range = [0.99 * tMax, tMax]
        tReducer.prop_man.monovan_mapfile = None
        tReducer.save_format = None
        tReducer.prop_man.normalise_method = "monitor-1"
        tReducer.norm_mon_integration_range = [tMin, tMax]

        result = tReducer.convert_to_energy(wb_ws, run, [67.0, 122.0], [-2, 0.02, 0.8], None, mono)

        self.assertEqual(len(result), 2)

        ws1 = result[0]
        self.assertEqual(ws1.getAxis(0).getUnit().unitID(), "DeltaE")
        x = ws1.readX(0)
        self.assertAlmostEqual(x[0], -2 * 67.0)
        self.assertAlmostEqual(x[-1], 0.8 * 67.0)

        ws2 = result[1]
        self.assertEqual(ws2.getAxis(0).getUnit().unitID(), "DeltaE")
        x = ws2.readX(0)
        self.assertAlmostEqual(x[0], -2 * 122.0)
        self.assertAlmostEqual(x[-1], 0.8 * 122.0)

        # test another ws
        # rename samples from previous workspace to avoid deleting them on current run
        for ind, item in enumerate(result):
            result[ind] = RenameWorkspace(item, OutputWorkspace="SampleRez#" + str(ind))
        #
        result2 = tReducer.convert_to_energy(None, run2)

        rez = CheckWorkspacesMatch(result[0], result2[0])
        self.assertEqual(rez, "Success!")
        rez = CheckWorkspacesMatch(result[1], result2[1])
        self.assertEqual(rez, "Success!")
class ReductionWrapper(object):
    """ Abstract class provides interface to direct inelastic reduction
        allowing it to be run  from Mantid, web services, or system tests
        using the same interface and the same run file placed in different
        locations.
    """
    class var_holder(object):
        """ A simple wrapper class to keep web variables"""
        def __init__(self,Web_vars=None):
            if Web_vars:
                self.standard_vars = Web_vars.standard_vars
                self.advanced_vars = Web_vars.advanced_vars
            else:
                self.standard_vars = None
                self.advanced_vars = None
        #
        def get_all_vars(self):
            """Return dictionary with all defined variables
               combined together
            """
            web_vars = {}
            if self.advanced_vars:
                web_vars = self.advanced_vars.copy()
            if self.standard_vars:
                if len(web_vars)>0:
                    web_vars.update(self.standard_vars)
                else:
                    web_vars = self.standard_vars.copy()
            return web_vars
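        # A minimal sketch (hypothetical values) of the merge rule implemented above:
        # advanced_vars are copied first and then updated with standard_vars, so a key
        # present in both dictionaries takes its value from standard_vars.
        #
        #   holder = ReductionWrapper.var_holder()
        #   holder.advanced_vars = {'map_file': 'mari_res.map', 'check_background': True}
        #   holder.standard_vars = {'check_background': False}
        #   holder.get_all_vars()  # -> {'map_file': 'mari_res.map', 'check_background': False}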


    def __init__(self,instrumentName,web_var=None):
        """ sets properties defaults for the instrument with Name
          and define if wrapper runs from web services or not
        """
        # internal variable, indicating if we should try to wait for input files to appear
        self._wait_for_file = False
        #The property defines the run number, to validate. If defined, switches reduction wrapper from
        #reduction to validation mode
        self._run_number_to_validate=None
      # internal variable, used in system tests to validate workflow,
      # with waiting for files.  It is the holder to the function
      # used during debugging "wait for files" workflow
      # instead of Pause algorithm
        self._debug_wait_for_files_operation = None
        # tolerance to change in some tests if default is not working well
        self._tolerr=None

      # The variables which are set up from web interface or to be exported to
      # web interface
        if web_var:
            self._run_from_web = True
        else:
            self._run_from_web = False
        self._wvs = ReductionWrapper.var_holder(web_var)
      # Initialize reduced for given instrument
        self.reducer = DirectEnergyConversion(instrumentName)
        #
        web_vars = self._wvs.get_all_vars()
        if web_vars :
            self.reducer.prop_man.set_input_parameters(**web_vars)


    @property
    def wait_for_file(self):
        """ If this variable set to positive value, this value
            is interpreted as time to wait until check for specified run file
            if this file have not been find immediately.

            if this variable is 0 or false and the file have not been found,
            reduction will fail
        """
        return self._wait_for_file

    @wait_for_file.setter
    def wait_for_file(self,value):
        if value > 0:
            self._wait_for_file = value
        else:
            self._wait_for_file = False
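    # A brief usage sketch (hypothetical subclass name): any positive value enables waiting
    # and is used as the pause interval, in seconds, between checks for the run file; zero
    # or False disables waiting, so a missing file fails the reduction immediately.
    #
    #   rd = SomeReductionWrapperSubclass('MAR')   # assumed user-defined subclass
    #   rd.wait_for_file = 30                      # re-check for the run file every 30 s
    #   rd.wait_for_file = 0                       # fail at once if the file is absent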
#
    def save_web_variables(self,FileName=None):
        """ Method to write simple and advanced properties and help
            information  into dictionary, to use by web reduction
            interface

            If no file is provided, reduce_var.py file will be written
            to the folder, containing current script

        """
        if not FileName:
            FileName = 'reduce_vars.py'

        f = open(FileName,'w')
        f.write("standard_vars = {\n")
        str_wrapper = '         '
        for key,val in self._wvs.standard_vars.iteritems():
            if isinstance(val,str):
                row = "{0}\'{1}\':\'{2}\'".format(str_wrapper,key,val)
            else:
                row = "{0}\'{1}\':{2}".format(str_wrapper,key,val)
            f.write(row)
            str_wrapper = ',\n         '
        f.write("\n}\nadvanced_vars={\n")
        # print the advanced variables
        str_wrapper = '         '
        for key,val in self._wvs.advanced_vars.iteritems():
            if isinstance(val,str):
                row = "{0}\'{1}\':\'{2}\'".format(str_wrapper,key,val)
            else:
                row = "{0}\'{1}\':{2}".format(str_wrapper,key,val)
            f.write(row)
            str_wrapper = ',\n        '

        def write_help_block(fhandle,block_name,block_dict):
            str_wrapper = '         '
            row = "{0}\'{1}\' : {{\n".format(str_wrapper,block_name)
            fhandle.write(row)
            for key in block_dict:
                try:
                    prop = getattr(PropertyManager,key)
                    docstring = prop.__doc__
                    if not docstring:
                        continue
                except:
                    continue
                contents = self._do_format(docstring)
                row = "{0}\'{1}\':\'{2}\'".format(str_wrapper,key,contents)
                fhandle.write(row)
                str_wrapper = ',\n        '
            fhandle.write('{0} }},\n'.format(str_wrapper))

        f.write("\n}\nvariable_help={\n")
        write_help_block(f,"standard_vars",self._wvs.standard_vars)
        write_help_block(f,"advanced_vars",self._wvs.advanced_vars)
        f.write("}\n")
        f.close()
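    # For illustration, the generated reduce_vars.py has roughly this shape (the keys and
    # values below are hypothetical, not produced by this code):
    #
    #   standard_vars = {
    #            'sample_run':11001,
    #            'incident_energy':50.0
    #   }
    #   advanced_vars={
    #            'map_file':'mari_res.map'
    #   }
    #   variable_help={
    #            'standard_vars' : {...},
    #            'advanced_vars' : {...},
    #   }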

    def _do_format(self,docstring):
        """Format docstring to write it as string in the reduce_var file"""
        contents = re.sub(" +"," ",docstring)
        contents = contents.split('\n')
        contents = '\\n'.join(contents)
        return contents
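    # A small illustration of what _do_format produces (assumed input): repeated spaces are
    # collapsed and physical newlines are replaced by literal '\n' sequences, so a multi-line
    # docstring can be stored as a one-line string in reduce_vars.py, e.g.
    #   "Incident   energy\nin meV"  ->  "Incident energy\\nin meV"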

    @property
    def validate_run_number(self):
        """The property defines the run number to validate. If defined, switches reduction wrapper from
           reduction to validation mode, where reduction tries to load result, previously calculated,
           for this run and then compare this result with the result, defined earlier"""
        return self._run_number_to_validate

    @validate_run_number.setter
    def validate_run_number(self,val):
        if val is None:
            self._run_number_to_validate = None
        else:
            self._run_number_to_validate = int(val)

    def validate_settings(self):
        """ method validates initial parameters, provided for reduction"""
        self.def_advanced_properties()
        self.def_main_properties()
        if self._run_from_web:
            web_vars = self._wvs.get_all_vars()
            self.reducer.prop_man.set_input_parameters(**web_vars)
        else:
            pass # we should already set up these variables using
            # def_main_properties & def_advanced_properties
        # validate properties and report result
        return self.reducer.prop_man.validate_properties(False)
#
    def validation_file_name(self):
        """ the name of the file, used as reference to
            validate the run, specified as the class property

            The method can be overloaded to return a workspace
            or workspace name to validate results against.
        """
        if not PropertyManager.save_file_name._file_name is None:
            file_name = PropertyManager.save_file_name._file_name
            if isinstance(file_name,api.Workspace):
                return file_name
        else:
            instr = self.reducer.prop_man.instr_name
            run_n = self.validate_run_number
            ei    = PropertyManager.incident_energy.get_current()
            file_name = '{0}{1}_{2:<3.2f}meV_VALIDATION_file.nxs'.format(instr,run_n,ei)
        run_dir = self.validation_file_place()
        full_name = os.path.join(run_dir,file_name)
        return full_name
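    # For illustration (hypothetical values): with instrument 'MAR', validate_run_number=11001
    # and a current incident energy of 50 meV, the generated name would be
    # 'MAR11001_50.00meV_VALIDATION_file.nxs', joined onto the directory returned by
    # validation_file_place().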

    def validation_file_place(self):
        """Redefine this to the place, where validation file, used in conjunction with
           'validate_run' property, located. Here it defines the place to this script folder.
           By default it looks for/places it in a default save directory"""
        return config['defaultsave.directory']

#
    def validate_result(self,Error=1.e-6,ToleranceRelErr=True):
        """Method to validate result against existing validation file
         or workspace

         Change this method to verify different results or validate results differently"""
        rez,message = ReductionWrapper.build_or_validate_result(self,
                                     Error,ToleranceRelErr)
        return rez,message
   #

    def set_custom_output_filename(self):
        """ define custom name of output files if standard one is not satisfactory
          User expected to overload this method within class instantiation """
        return None
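    # A hedged overload sketch (the names below and the exact way the returned value is
    # consumed are assumptions, not shown in this snippet): subclasses usually build a
    # name from the current properties and hand it back via a no-argument callable.
    #
    #   def set_custom_output_filename(self):
    #       def custom_name(prop_man):
    #           return "RUN{0}_custom".format(prop_man.sample_run)
    #       return lambda: custom_name(self.reducer.prop_man)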


    def build_or_validate_result(self,Error=1.e-6,ToleranceRelErr=True):
        """ Method validates results of the reduction against reference file or workspace.

            Inputs:
            sample_run     -- the run number to reduce or validate against existing result
            validation_file -- The name of nxs file, containing workspace, produced by reducing SampleRun,
                              or the pointer to the workspace, which is the reference workspace
                              for SampleRun reduction.

            Returns:
            True   if reduction for sample_run produces result within Error from the reference file
                   as reported by CheckWorkspaceMatch.
            False  if CheckWorkspaceMatch comparison between sample and reduction is unsuccessful

            True  if was not able to load reference file. In this case, algorithm builds validation
                  file and returns True if the reduction and saving of this file is successful

        """
        # this row defines location of the validation file
        validation_file = self.validation_file_name()
        sample_run = self.validate_run_number
        if isinstance(validation_file,str):
            path,name = os.path.split(validation_file)
            if name in mtd:
                reference_ws = mtd[name]
                build_validation = False
                fileName = "workspace:"+reference_ws.name()
            else:
                if len(path)>0:
                    config.appendDataSearchDir(path)
                # is there a bug in getFullPath? It returns the same string if given a full path,
                # even when the file has not been found
                name,fext=os.path.splitext(name)
                fileName = FileFinder.getFullPath(name+'.nxs')
                if len(fileName)>0:
                    build_validation = False
                    try:
                        reference_ws = Load(fileName)
                    except:
                        build_validation = True
                else:
                    build_validation = True
        elif isinstance(validation_file,api.Workspace):
        # its workspace:
            reference_ws = validation_file
            build_validation = False
            fileName = "workspace:"+reference_ws.name()
        else:
            build_validation = True
        #--------------------------------------------------------
        if build_validation:
            self.reducer.prop_man.save_file_name = validation_file
            self.reducer.prop_man.log\
                 ("*** WARNING:can not find or load validation file {0}\n"\
                  "    Building validation file for run N:{1}".format(validation_file,sample_run),'warning')
        else:
            self.reducer.prop_man.log\
                 ("*** FOUND VALIDATION FILE: {0}\n"\
                  "    Validating run {1} against this file".format(fileName,sample_run),'warning')

        # just in case, to be sure
        current_web_state = self._run_from_web
        current_wait_state = self.wait_for_file
        # disable wait for input and
        self._run_from_web = False
        self.wait_for_file = False
        #
        self.def_advanced_properties()
        self.def_main_properties()
        #
        self.reducer.sample_run = sample_run
        self.reducer.prop_man.save_format = None

        reduced = self.reduce()

        if build_validation:
            self.reducer.prop_man.save_file_name = None
            result_name = os.path.splitext(validation_file)[0]
            self.reducer.prop_man.log("*** Saving validation file with name: {0}.nxs".format(result_name),'notice')
            SaveNexus(reduced,Filename=result_name + '.nxs')
            return True,'Created validation file {0}.nxs'.format(result_name)
        else:
            if isinstance(reduced,list): # check only first result in multirep
                reduced = reduced[0]
            # use the test-specific tolerance override if it has been set
            if self._tolerr:
                TOLL=self._tolerr
            else:
                TOLL = Error
            result = CheckWorkspacesMatch(Workspace1=reference_ws,Workspace2=reduced,\
                                      Tolerance=TOLL,CheckSample=False,\
                                      CheckInstrument=False,ToleranceRelErr=ToleranceRelErr)

        self.wait_for_file = current_wait_state
        self._run_from_web = current_web_state
        if result == 'Success!':
            return True,'Reference file and reduced workspace are equal with accuracy {0:<3.2f}'\
                        .format(TOLL)
        else:
            fname,ext = os.path.splitext(fileName)
            filename = fname+'-mismatch.nxs'
            self.reducer.prop_man.log("***WARNING: can not get results matching the reference file.\n"\
                                      "   Saving new results to file {0}".format(filename),'warning')
            SaveNexus(reduced,Filename=filename)
            return False,result

    @abstractmethod
    def def_main_properties(self):
        """ Define properties which considered to be main properties changeable by user

            Should be overwritten by special reduction and decorated with  @MainProperties decorator.

            Should return dictionary with key are the properties names and values -- the default
            values these properties should have.
        """
        raise NotImplementedError('def_main_properties  has to be implemented')
    @abstractmethod
    def def_advanced_properties(self):
        """ Define properties which considered to be advanced but still changeable by instrument scientist or advanced user

            Should be overwritten by special reduction and decorated with  @AdvancedProperties decorator.

            Should return dictionary with key are the properties names and values -- the default
            values these properties should have.
        """

        raise NotImplementedError('def_advanced_properties  has to be implemented')
    #
    def _run_pause(self,timeToWait=0):
        """ a wrapper around pause algorithm allowing to run something
            instead of pause in debug mode
        """

        if not self._debug_wait_for_files_operation is None:
            self._debug_wait_for_files_operation()
        else:
            Pause(timeToWait)
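    # Illustration of the debug hook mentioned above (assumed usage; 'fake_files_appear' is a
    # hypothetical test helper): system tests can bypass the real Pause algorithm by installing
    # a callable before the reduction runs.
    #
    #   rd._debug_wait_for_files_operation = fake_files_appear
    #   rd.run_reduction()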
    #
    def reduce(self,input_file=None,output_directory=None):
        """ The method performs all main reduction operations over
            single run file

            Wrap it into @iliad wrapper to switch input for
            reduction properties between script and web variables
        """
        if input_file:
            self.reducer.sample_run = str(input_file)
        if output_directory:
            config['defaultsave.directory'] = str(output_directory)

        timeToWait = self._wait_for_file
        wait_counter=0
        if timeToWait > 0:
            Found,input_file = PropertyManager.sample_run.find_file(be_quet=True)
            while not Found:
                file_hint,fext = PropertyManager.sample_run.file_hint()
                self.reducer.prop_man.log("*** Waiting {0} sec for file {1} to appear on the data search path"\
                    .format(timeToWait,file_hint),'notice')

                self._run_pause(timeToWait)
                Found,input_file = PropertyManager.sample_run.find_file(file_hint=file_hint,be_quet=True)
                if Found:
                    file,found_ext=os.path.splitext(input_file)
                    if found_ext != fext:
                        wait_counter+=1
                        if wait_counter<2:
                            timeToWait =60
                            self.reducer.prop_man.log(\
                            "*** Requested file with extension {0} but found one with extension {1}\n"\
                            "    The target may not have been delivered from the DAE machine\n".format(fext,found_ext))
                            Found = False
                        else:
                            wait_counter = 0
                else:
                    pass # not found, wait more
            #endWhile
            converted_to_energy_transfer_ws = self.reducer.convert_to_energy(None,input_file)

        else:
            converted_to_energy_transfer_ws = self.reducer.convert_to_energy(None,input_file)

        return converted_to_energy_transfer_ws
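    # A minimal usage sketch (hypothetical subclass; in production this method is normally
    # wrapped by @iliad and driven through run_reduction):
    #
    #   rd = SomeReductionWrapperSubclass('MAR')     # assumed user-defined subclass
    #   rd.def_advanced_properties()
    #   rd.def_main_properties()
    #   ws = rd.reduce('MAR11001.raw', r'/tmp/results')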
    #
    def sum_and_reduce(self):
        """ procedure used to sum and reduce runs in case when not all files
           are available and user have to wait for these files to appear
       """
        if not PropertyManager.sample_run._run_list:
            raise RuntimeError("sum_and_reduce expects run file list to be defined")

        self.reducer.prop_man.sum_runs = True

        timeToWait = self._wait_for_file
        self._wait_for_file = 0
        if timeToWait > 0:
            run_files = PropertyManager.sample_run.get_run_list()
            num_files_to_sum = len(PropertyManager.sample_run)

            ok,missing,found = self.reducer.prop_man.find_files_to_sum()
            n_found = len(found)
            if not ok:
                # necessary to cache intermediate sums in memory
                self.reducer.prop_man.cashe_sum_ws = True
            while not ok:
                while n_found > 0:
                    last_found = found[-1]
                    self.reducer.prop_man.sample_run = last_found # request to reduce all up to last found
                    # Note that here we run convert_to_energy instead of the (possibly overloaded) user reduction!
                    # This would cause problems for a user-defined reduction which pre-processes rather than
                    # post-processes the resulting workspace
                    ws = self.reducer.convert_to_energy()
                    # reset search to whole file list again
                    self.reducer.prop_man.sample_run = run_files[num_files_to_sum - 1]
                    ok,missing,found = self.reducer.prop_man.find_files_to_sum()
                    n_found = len(found)
                    if ok: # no need to cache sum any more.  All necessary files found
                        self.reducer.prop_man.cashe_sum_ws = False

                self.reducer.prop_man.log("*** Waiting {0} sec for runs {1} to appear on the data search path"\
                    .format(timeToWait,str(missing)),'notice')
                self._run_pause(timeToWait)
                ok,missing,found = self.reducer.prop_man.find_files_to_sum()
                n_found = len(found)
            #end not(ok)
            if n_found > 0:
                # the cached sum can be dropped now if that has not been done before
                self.reducer.prop_man.cashe_sum_ws = False
                ws = self.reduce()
        else:
            ws = self.reduce()
        self._wait_for_file = timeToWait
        return ws
    #
    def run_reduction(self):
        """" Reduces runs one by one or sum all them together and reduce after this

            if wait_for_file time is > 0, it will until  missing files appear on the
            data search path
        """
        try:
            n,r = funcreturns.lhs_info('both')
            out_ws_name = r[0]
        except:
            out_ws_name = None

        # if this is not None, we want to run validation not reduction
        if self.validate_run_number:
            self.reducer.prop_man.log\
            ("**************************************************************************************",'warning')
            self.reducer.prop_man.log\
            ("**************************************************************************************",'warning')
            rez,mess=self.build_or_validate_result()
            if rez:
                self.reducer.prop_man.log("*** SUCCESS! {0}".format(mess))
                self.reducer.prop_man.log\
               ("**************************************************************************************",'warning')

            else:
                self.reducer.prop_man.log("*** VALIDATION FAILED! {0}".format(mess))
                self.reducer.prop_man.log\
               ("**************************************************************************************",'warning')
                raise RuntimeError("Validation against old data file failed")
            self.validate_run_number=None
            return rez,mess

        if self.reducer.sum_runs:
            # --------### sum runs provided ------------------------------------###
            if out_ws_name is None:
                self.sum_and_reduce()
                return None
            else:
                red_ws = self.sum_and_reduce()
                RenameWorkspace(InputWorkspace=red_ws,OutputWorkspace=out_ws_name)
                return mtd[out_ws_name]
        else:
            # --------### reduce list of runs one by one ----------------------------###
            runfiles = PropertyManager.sample_run.get_run_file_list()
            if out_ws_name is None:
                for file in runfiles:
                    self.reduce(file)
                return None
            else:
                results = []
                nruns = len(runfiles)
                for num,file in enumerate(runfiles):
                    red_ws = self.reduce(file)
                    if isinstance(red_ws,list):
                        for ws in red_ws:
                            results.append(ws)
                    else:
                        if nruns == 1:
                            if red_ws.name() != out_ws_name:
                                RenameWorkspace(InputWorkspace=red_ws,OutputWorkspace=out_ws_name)
                            results.append(mtd[out_ws_name])
                        else:
                            OutWSName = '{0}#{1}of{2}'.format(out_ws_name,num+1,nruns)
                            if red_ws.name() != out_ws_name:
                                RenameWorkspace(InputWorkspace=red_ws,OutputWorkspace=OutWSName)
                            results.append(mtd[OutWSName])
                #end
                if len(results) == 1:
                    return results[0]
                else:
                    return results
    def test_abs_multirep_with_bkg_and_bleed(self):
        # create test workspace
        run_monitors=CreateSampleWorkspace(Function='Multiple Peaks', NumBanks=4, BankPixelWidth=1,\
                                            NumEvents=100000, XUnit='Energy', XMin=3, XMax=200, BinWidth=0.1)
        LoadInstrument(run_monitors,InstrumentName='MARI', RewriteSpectraMap=True)
        ConvertUnits(InputWorkspace='run_monitors', OutputWorkspace='run_monitors', Target='TOF')
        run_monitors = mtd['run_monitors']
        tof = run_monitors.dataX(3)
        tMin = tof[0]
        tMax = tof[-1]
        run = CreateSampleWorkspace( Function='Multiple Peaks',WorkspaceType='Event',NumBanks=8, BankPixelWidth=1,\
                                     NumEvents=100000, XUnit='TOF',xMin=tMin,xMax=tMax)
        LoadInstrument(run,InstrumentName='MARI', RewriteSpectraMap=True)
        AddSampleLog(run,LogName='gd_prtn_chrg',LogText='1.',LogType='Number')
        run.setMonitorWorkspace(run_monitors)

        # build "monovanadium"
        mono = CloneWorkspace(run)
        mono_monitors = CloneWorkspace(run_monitors)
        mono.setMonitorWorkspace(mono_monitors)

        # build "White-beam"
        wb_ws   = Rebin(run,Params=[tMin,1,tMax],PreserveEvents=False)

        # build "second run" to ensure repeated execution
        run2 = CloneWorkspace(run)
        run2_monitors = CloneWorkspace(run_monitors)
        run2.setMonitorWorkspace(run2_monitors)

        # Run multirep
        tReducer = DirectEnergyConversion(run.getInstrument())
        tReducer.prop_man.run_diagnostics=True 
        tReducer.hard_mask_file=None
        tReducer.map_file=None
        tReducer.prop_man.check_background = True
        tReducer.prop_man.background_range=[0.99*tMax,tMax]
        tReducer.prop_man.monovan_mapfile=None
        tReducer.save_format=None
        tReducer.prop_man.normalise_method='monitor-2'

        tReducer.prop_man.bleed = True
        tReducer.norm_mon_integration_range=[tMin,tMax]

        AddSampleLog(run,LogName='good_frames',LogText='1.',LogType='Number Series')
        result = tReducer.convert_to_energy(wb_ws,run,[67.,122.],[-2,0.02,0.8],None,mono)

        self.assertEqual(len(result),2)

        ws1=result[0]
        self.assertEqual(ws1.getAxis(0).getUnit().unitID(),'DeltaE')
        x = ws1.readX(0)
        self.assertAlmostEqual(x[0],-2*67.)
        self.assertAlmostEqual(x[-1],0.8*67.)

        ws2=result[1]
        self.assertEqual(ws2.getAxis(0).getUnit().unitID(),'DeltaE')
        x = ws2.readX(0)
        self.assertAlmostEqual(x[0],-2*122.)
        self.assertAlmostEqual(x[-1],0.8*122.)

        # test another ws
        # rename samples from previous workspace to avoid deleting them on current run
        for ind,item in enumerate(result):
            result[ind]=RenameWorkspace(item,OutputWorkspace='SampleRez#'+str(ind))
        #
        AddSampleLog(run2,LogName='goodfrm',LogText='1',LogType='Number')
        result2 = tReducer.convert_to_energy(None,run2)

        rez = CheckWorkspacesMatch(result[0],result2[0])
        self.assertEqual(rez,'Success!')
        rez = CheckWorkspacesMatch(result[1],result2[1])
        self.assertEqual(rez,'Success!')
class ReductionWrapper(object):
    """ Abstract class provides interface to direct inelastic reduction
        allowing it to be run  from Mantid, web services, or system tests
        using the same interface and the same run file placed in different
        locations.
    """

    #pylint: disable=too-few-public-methods
    class var_holder(object):
        """ A simple wrapper class to keep web variables"""
        def __init__(self, Web_vars=None):
            if Web_vars:
                self.standard_vars = Web_vars.standard_vars
                self.advanced_vars = Web_vars.advanced_vars
            else:
                self.standard_vars = None
                self.advanced_vars = None

        #

        def get_all_vars(self):
            """Return dictionary with all defined variables
               combined together
            """
            web_vars = {}
            if self.advanced_vars:
                web_vars = self.advanced_vars.copy()
            if self.standard_vars:
                if len(web_vars) > 0:
                    web_vars.update(self.standard_vars)
                else:
                    web_vars = self.standard_vars.copy()
            return web_vars
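        # An illustrative sketch of the merge rule above (variable names below are assumptions):
        # keys present in both dictionaries end up with the standard_vars value, because
        # update() is applied last.
        #
        #   holder.advanced_vars = {'map_file': 'mari_res.map', 'save_format': 'nxs'}
        #   holder.standard_vars = {'save_format': 'nxspe'}
        #   holder.get_all_vars()  # -> {'map_file': 'mari_res.map', 'save_format': 'nxspe'}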

    def __init__(self, instrumentName, web_var=None):
        """ sets properties defaults for the instrument with Name
          and define if wrapper runs from web services or not
        """
        # internal variable, indicating if we should try to wait for input files to appear
        self._wait_for_file = False
        #The property defines the run number, to validate. If defined, switches reduction wrapper from
        #reduction to validation mode
        self._run_number_to_validate = None
        # internal variable, used in system tests to validate workflow,
        # with waiting for files.  It is the holder to the function
        # used during debugging "wait for files" workflow
        # instead of Pause algorithm
        self._debug_wait_for_files_operation = None
        # tolerance to change in some tests if default is not working well
        self._tolerr = None

        # The variables which are set up from web interface or to be exported to
        # web interface
        if web_var:
            self._run_from_web = True
        else:
            self._run_from_web = False
        self._wvs = ReductionWrapper.var_holder(web_var)
        # Initialize reducer for the given instrument
        self.reducer = DirectEnergyConversion(instrumentName)
        #
        web_vars = self._wvs.get_all_vars()
        if web_vars:
            self.reducer.prop_man.set_input_parameters(**web_vars)

    @property
    def wait_for_file(self):
        """ If this variable set to positive value, this value
            is interpreted as time to wait until check for specified run file
            if this file have not been find immediately.

            if this variable is 0 or false and the file have not been found,
            reduction will fail
        """
        return self._wait_for_file

    @wait_for_file.setter
    def wait_for_file(self, value):
        if value > 0:
            self._wait_for_file = value
        else:
            self._wait_for_file = False
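    # Illustrative use of the property above (a sketch; the subclass name is hypothetical):
    #
    #   rd = MyInstrumentReduction('MARI')
    #   rd.wait_for_file = 600    # poll for a missing run file every 600 sec
    #   rd.run_reduction()        # reduce() and sum_and_reduce() will now wait for files
    #   rd.wait_for_file = 0      # disable waiting: a missing file makes the reduction fail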
#

    def save_web_variables(self, FileName=None):
        """ Method to write simple and advanced properties and help
            information  into dictionary, to use by web reduction
            interface

            If no file is provided, reduce_var.py file will be written
            to the folder, containing current script

        """
        if not FileName:
            FileName = 'reduce_vars.py'

        f = open(FileName, 'w')
        f.write("standard_vars = {\n")
        str_wrapper = '         '
        for key, val in iteritems(self._wvs.standard_vars):
            if isinstance(val, string_types):
                row = "{0}\'{1}\':\'{2}\'".format(str_wrapper, key, val)
            else:
                row = "{0}\'{1}\':{2}".format(str_wrapper, key, val)
            f.write(row)
            str_wrapper = ',\n         '
        f.write("\n}\nadvanced_vars={\n")
        # print advanced variables
        str_wrapper = '         '
        for key, val in iteritems(self._wvs.advanced_vars):
            if isinstance(val, string_types):
                row = "{0}\'{1}\':\'{2}\'".format(str_wrapper, key, val)
            else:
                row = "{0}\'{1}\':{2}".format(str_wrapper, key, val)
            f.write(row)
            str_wrapper = ',\n        '

        def write_help_block(fhandle, block_name, block_dict):
            str_wrapper = '         '
            row = "{0}\'{1}\' : {{\n".format(str_wrapper, block_name)
            fhandle.write(row)
            for key in block_dict:
                try:
                    prop = getattr(PropertyManager, key)
                    docstring = prop.__doc__
                    if not docstring:
                        continue
#pylint: disable=bare-except
                except:
                    continue
                contents = self._do_format(docstring)
                row = "{0}\'{1}\':\'{2}\'".format(str_wrapper, key, contents)
                fhandle.write(row)
                str_wrapper = ',\n        '
            fhandle.write('{0} }},\n'.format(str_wrapper))

        f.write("\n}\nvariable_help={\n")
        write_help_block(f, "standard_vars", self._wvs.standard_vars)
        write_help_block(f, "advanced_vars", self._wvs.advanced_vars)
        f.write("}\n")
        f.close()

    def _do_format(self, docstring):
        """Format docstring to write it as string in the reduce_var file"""
        contents = re.sub(" +", " ", docstring)
        contents = contents.split('\n')
        contents = '\\n'.join(contents)
        return contents
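    # A small illustration of what _do_format produces (the input string is a sketch):
    #
    #   self._do_format("Incident   energy\n   or range of energies")
    #   # -> 'Incident energy\\n or range of energies'
    #   # (runs of spaces collapsed, newlines turned into literal '\n' text for reduce_vars.py)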

    @property
    def validate_run_number(self):
        """The property defines the run number to validate. If defined, switches reduction wrapper from
           reduction to validation mode, where reduction tries to load result, previously calculated,
           for this run and then compare this result with the result, defined earlier"""
        return self._run_number_to_validate

    @validate_run_number.setter
    def validate_run_number(self, val):
        if val is None:
            self._run_number_to_validate = None
        else:
            self._run_number_to_validate = int(val)
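    # Illustrative use (a sketch; the run number is hypothetical): values may be given as
    # strings or integers and are stored as int, while None switches validation mode off.
    #
    #   rd.validate_run_number = '11001'   # stored as the integer 11001
    #   rd.validate_run_number = None      # back to normal reduction mode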

    def validate_settings(self):
        """ method validates initial parameters, provided for reduction"""
        self.def_advanced_properties()
        self.def_main_properties()
        if self._run_from_web:
            web_vars = self._wvs.get_all_vars()
            self.reducer.prop_man.set_input_parameters(**web_vars)
        else:
            pass  # we should already set up these variables using
            # def_main_properties & def_advanced_properties
        # validate properties and report result
        return self.reducer.prop_man.validate_properties(False)
#

    def validation_file_name(self):
        """ the name of the file, used as reference to
            validate the run, specified as the class property

            The method can be overloaded to return a workspace
            or workspace name to validate results against.
        """
        #pylint: disable=protected-access
        if PropertyManager.save_file_name._file_name is not None:
            #pylint: disable=protected-access
            file_name = PropertyManager.save_file_name._file_name
            if isinstance(file_name, api.Workspace):
                return file_name
        else:
            instr = self.reducer.prop_man.instr_name
            run_n = self.validate_run_number
            ei = PropertyManager.incident_energy.get_current()
            file_name = '{0}{1}_{2:<3.2f}meV_VALIDATION_file.nxs'.format(
                instr, run_n, ei)
        run_dir = self.validation_file_place()
        full_name = os.path.join(run_dir, file_name)
        return full_name
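    # Example of the default name built above, assuming instr_name resolves to 'MARI',
    # the run to validate is 11001 and the current incident energy is 5 meV (all
    # hypothetical values):
    #
    #   <defaultsave.directory>/MARI11001_5.00meV_VALIDATION_file.nxs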

    def validation_file_place(self):
        """Redefine this to the place, where validation file, used in conjunction with
           'validate_run' property, located. Here it defines the place to this script folder.
           By default it looks for/places it in a default save directory"""
        return config['defaultsave.directory']

#

    def validate_result(self, Error=1.e-6, ToleranceRelErr=True):
        """Method to validate result against existing validation file
         or workspace

         Change this method to verify different results or validate results differently"""
        rez, message = ReductionWrapper.build_or_validate_result(
            self, Error, ToleranceRelErr)
        return rez, message
#

    def set_custom_output_filename(self):
        """ define custom name of output files if standard one is not satisfactory
          User expected to overload this method within class instantiation """
        return None

    def evaluate_abs_corrections(self, test_ws, spectra_to_correct):
        """ Evaluate absorption corrections from the input workspace
            Input:
            test_ws -- the workspace to calculate corrections for.
                       The corrections themselves should be defined by
                       the following data reduction properties:
                       prop_man.correct_absorption_on = TheShapeOfTheSample -- defines the sample parameters
                       prop_man.abs_corr_info = {} -- dictionary with additional correction parameters
                       (can be empty)
             spectra_to_correct -- list of the spectra to correct absorption for.
             If this list is empty, the corrections are calculated for the whole workspace,
             which can cause problems for plotting.

             Returns:
             corrections -- the workspace containing the absorption corrections
             for the spectra, specified in spectra_to_correct variable.
        """

        n_spectra = test_ws.getNumberHistograms()
        decrement = len(spectra_to_correct)
        if decrement > 0:
            red_ws = ExtractSpectra(test_ws,
                                    WorkspaceIndexList=spectra_to_correct)
        else:
            decrement = n_spectra
            red_ws = test_ws  # correct the whole workspace

        prop_man = self.reducer.prop_man
        abs_shape = prop_man.correct_absorption_on
        start_time = time.time()
        ws, corrections = abs_shape.correct_absorption(red_ws,
                                                       prop_man.abs_corr_info)
        end_time = time.time()
        estimated_time = (end_time - start_time) * n_spectra / decrement
        prop_man.log(
            "**************************************************************************************************",
            'notice')
        prop_man.log(
            "*** Estimated time to run absorption corrections on the final workspace is: {0:.1f}sec"
            .format(estimated_time), 'notice')
        prop_man.log(
            "**************************************************************************************************",
            'notice')
        return (corrections, estimated_time)
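    # Illustrative call (a sketch; the property values and the sample shape are assumptions):
    #
    #   prop_man = rd.reducer.prop_man
    #   prop_man.correct_absorption_on = some_sample_shape   # e.g. a cylinder description
    #   prop_man.abs_corr_info = {}                          # default correction settings
    #   corrections, t_est = rd.evaluate_abs_corrections(test_ws, [0, 1, 2, 3])
    #   # corrections holds factors for the four selected spectra; t_est estimates the
    #   # time needed to correct the whole workspace.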

#pylint: disable=too-many-branches

    def build_or_validate_result(self, Error=1.e-6, ToleranceRelErr=True):
        """ Method validates results of the reduction against reference file or workspace.

            Inputs:
            sample_run     -- the run number to reduce or validate against existing result
            validation_file -- The name of nxs file, containing workspace, produced by reducing SampleRun,
                              or the pointer to the workspace, which is the reference workspace
                              for SampleRun reduction.

            Returns:
            True   if reduction for sample_run produces result within Error from the reference file
                   as reported by CompareWorkspaces.
            False  if the CompareWorkspaces comparison between sample and reference is unsuccessful

            True  if it was not able to load the reference file. In this case, the algorithm builds a validation
                  file and returns True if the reduction and saving of this file are successful

        """
        # this row defines location of the validation file
        validation_file = self.validation_file_name()
        sample_run = self.validate_run_number
        if isinstance(validation_file, string_types):
            path, name = os.path.split(validation_file)
            if name in mtd:
                reference_ws = mtd[name]
                build_validation = False
                fileName = "workspace:" + reference_ws.name()
            else:
                if len(path) > 0:
                    config.appendDataSearchDir(path)
                # is there a bug in getFullPath? It returns the same string if given a full path
                # even when the file has not been found
#pylint: disable=unused-variable
                name, fext = os.path.splitext(name)
                fileName = FileFinder.getFullPath(name + '.nxs')
                if len(fileName) > 0:
                    build_validation = False
                    try:
                        reference_ws = Load(fileName)
#pylint: disable=bare-except
                    except:
                        build_validation = True
                else:
                    build_validation = True
        elif isinstance(validation_file, api.Workspace):
            # its workspace:
            reference_ws = validation_file
            build_validation = False
            fileName = "workspace:" + reference_ws.name()
        else:
            build_validation = True
        #--------------------------------------------------------
        if build_validation:
            self.reducer.prop_man.save_file_name = validation_file
            self.reducer.prop_man.log\
                ("*** WARNING:can not find or load validation file {0}\n"
                 "    Building validation file for run N:{1}".format(validation_file,sample_run),'warning')
        else:
            self.reducer.prop_man.log\
                 ("*** FOUND VALIDATION FILE: {0}\n"
                  "    Validating run {1} against this file".format(fileName,sample_run),'warning')

        # just in case, to be sure
        current_web_state = self._run_from_web
        current_wait_state = self.wait_for_file
        # disable wait for input and
        self._run_from_web = False
        self.wait_for_file = False
        #
        self.def_advanced_properties()
        self.def_main_properties()
        #
        self.reducer.sample_run = sample_run
        self.reducer.prop_man.save_format = None

        reduced = self.reduce()

        if build_validation:
            self.reducer.prop_man.save_file_name = None
            result_name = os.path.splitext(validation_file)[0]
            self.reducer.prop_man.log(
                "*** Saving validation file with name: {0}.nxs".format(
                    result_name), 'notice')
            SaveNexus(reduced, Filename=result_name + '.nxs')
            return True, 'Created validation file {0}.nxs'.format(result_name)
        else:
            if isinstance(reduced,
                          list):  # check only first result in multirep
                reduced = reduced[0]
            # Cheat! Counterintuitive!
            if self._tolerr:
                TOLL = self._tolerr
            else:
                TOLL = Error
            result = CompareWorkspaces(Workspace1=reference_ws,
                                       Workspace2=reduced,
                                       Tolerance=TOLL,
                                       CheckSample=False,
                                       CheckInstrument=False,
                                       ToleranceRelErr=ToleranceRelErr)

        self.wait_for_file = current_wait_state
        self._run_from_web = current_web_state
        if result[0]:
            return True,'Reference file and reduced workspace are equal with accuracy {0:<3.2f}'\
                        .format(TOLL)
        else:
            fname, _ = os.path.splitext(fileName)
            filename = fname + '-mismatch.nxs'
            self.reducer.prop_man.log(
                "***WARNING: can not get results matching the reference file.\n"
                "   Saving new results to file {0}".format(filename),
                'warning')
            SaveNexus(reduced, Filename=filename)
            return False, result
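    # Typical validation flow built on the method above (a sketch; the subclass name and
    # run number are hypothetical):
    #
    #   rd = MyInstrumentReduction('MARI')
    #   rd.validate_run_number = 11001     # switch the wrapper into validation mode
    #   ok, message = rd.run_reduction()   # internally calls build_or_validate_result()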

    @abstractmethod
    def def_main_properties(self):
        """ Define properties which considered to be main properties changeable by user

            Should be overwritten by special reduction and decorated with  @MainProperties decorator.

            Should return dictionary with key are the properties names and values -- the default
            values these properties should have.
        """
        raise NotImplementedError('def_main_properties  has to be implemented')

    @abstractmethod
    def def_advanced_properties(self):
        """ Define properties which considered to be advanced but still changeable by instrument scientist or advanced user

            Should be overwritten by special reduction and decorated with  @AdvancedProperties decorator.

            Should return dictionary with key are the properties names and values -- the default
            values these properties should have.
        """

        raise NotImplementedError(
            'def_advanced_properties  has to be implemented')

    #

    def _run_pause(self, timeToWait=0):
        """ a wrapper around pause algorithm allowing to run something
            instead of pause in debug mode
        """

        if self._debug_wait_for_files_operation is not None:
            # it is callable and the main point of this method is that it is callable
            #pylint: disable=E1102
            self._debug_wait_for_files_operation()
        else:
            Pause(timeToWait)

    #

    def _check_access_granted(self, input_file):
        """ Check if the access to the found nxs file is granted

            Created to fix an issue on the ISIS archive, where a file
            is copied over the network for ~2 min and only becomes readable
            some time after it has been found.
        """

        _, found_ext = os.path.splitext(input_file)
        if found_ext != '.nxs':  # the workaround applies to nxs files only. Others seem OK
            return
        if not h5py_installed:  # the check is unavailable without h5py. Sadly; but h5py should be available on
            # all our working systems. Inform the user about the problem
            self.reducer.prop_man.log \
                ('*** Can not verify if file is accessible. Install h5py to be able to check file access in waiting mode',
                 'notice')
            return
        ic = 0
        #ok = os.access(input_file,os.R_OK) # does not work in this case
        try:
            f = h5py.File(input_file, 'r')
            ok = True
        except IOError:
            ok = False
            while not ok:
                self.reducer.prop_man.log \
                    ('*** File found but access can not be gained. Waiting for 10 sec','notice')
                time.sleep(10)
                ic = ic + 1
                try:
                    f = h5py.File(input_file, 'r')
                    ok = True
                except IOError:
                    ok = False
                    if ic > 24:
                        raise IOError\
                            ("Can not get read access to input file: "+input_file+" after 4 min of trying")
        if ok:
            f.close()

    def reduce(self, input_file=None, output_directory=None):
        """ The method performs all main reduction operations over
            single run file

            Wrap it into @iliad wrapper to switch input for
            reduction properties between script and web variables
        """
        if input_file:
            # attribute-defined-outside-init -- wrong, it is not
            #pylint: disable=W0201
            self.reducer.sample_run = str(input_file)
        if output_directory:
            config['defaultsave.directory'] = str(output_directory)

        timeToWait = self._wait_for_file
        wait_counter = 0
        if timeToWait > 0:
            Found, input_file = PropertyManager.sample_run.find_file(
                self.reducer.prop_man, be_quet=True)
            while not Found:
                file_hint, fext = PropertyManager.sample_run.file_hint()
                self.reducer.prop_man.log(
                    "*** Waiting {0} sec for file {1} to appear on the data search path"
                    .format(timeToWait, file_hint), 'notice')

                self._run_pause(timeToWait)
                Found, input_file = PropertyManager.sample_run.find_file(
                    self.reducer.prop_man, file_hint=file_hint, be_quet=True)
                if Found:
                    _, found_ext = os.path.splitext(input_file)
                    if found_ext != fext:
                        wait_counter += 1
                        if wait_counter < 2:
                            timeToWait = 60
                            self.reducer.prop_man.log(
                                "*** Requested file with extension {0} but found one with extension {1}\n"
                                "    The target may not have been delivered from the DAE machine\n"
                                .format(fext, found_ext))
                            Found = False
                        else:
                            wait_counter = 0
                    else:
                        pass
                else:
                    pass  # not found, wait more
            #endWhile
            # found but let's give it some time to finish possible IO operations
            self._check_access_granted(input_file)
            #
            converted_to_energy_transfer_ws = self.reducer.convert_to_energy(
                None, input_file)

        else:
            converted_to_energy_transfer_ws = self.reducer.convert_to_energy(
                None, input_file)

        return converted_to_energy_transfer_ws

    #

    def sum_and_reduce(self):
        """ procedure used to sum and reduce runs in case when not all files
           are available and user have to wait for these files to appear
       """
        #pylint: disable=protected-access
        if not PropertyManager.sample_run._run_list:
            raise RuntimeError(
                "sum_and_reduce expects run file list to be defined")

        self.reducer.prop_man.sum_runs = True

        timeToWait = self._wait_for_file
        self._wait_for_file = 0
        if timeToWait > 0:
            run_files = PropertyManager.sample_run.get_run_list()
            num_files_to_sum = len(PropertyManager.sample_run)

            ok, missing, found = self.reducer.prop_man.find_files_to_sum()
            n_found = len(found)
            if not ok:
                # necessary to cache intermediate sums in memory
                self.reducer.prop_man.cashe_sum_ws = True
            while not ok:
                while n_found > 0:
                    last_found = found[-1]
                    self.reducer.prop_man.sample_run = last_found  # request to reduce all up to last found
                    # Note that here we run convert_to_energy instead of the user's (possibly overloaded) reduction!
                    # This would cause problems for a user-defined reduction which pre-processes rather than
                    # post-processes the resulting workspace
                    ws = self.reducer.convert_to_energy()
                    # reset search to whole file list again
                    self.reducer.prop_man.sample_run = run_files[
                        num_files_to_sum - 1]
                    ok, missing, found = self.reducer.prop_man.find_files_to_sum(
                    )
                    n_found = len(found)
                    if ok:  # no need to cache sum any more.  All necessary files found
                        self.reducer.prop_man.cashe_sum_ws = False

                self.reducer.prop_man.log(
                    "*** Waiting {0} sec for runs {1} to appear on the data search path"
                    .format(timeToWait, str(missing)), 'notice')
                self._run_pause(timeToWait)
                ok, missing, found = self.reducer.prop_man.find_files_to_sum()
                n_found = len(found)
            #end not(ok)
            if n_found > 0:
                # the cached sum can be dropped now if that has not been done before
                self.reducer.prop_man.cashe_sum_ws = False
                for run in found:
                    # here we have run numbers. Let's get real file names
                    prop_man = self.reducer.prop_man
                    instr_name = prop_man.short_instr_name
                    is_found, fname = PropertyManager.sample_run.find_file(
                        prop_man, instr_name, run)
                    if not is_found:
                        raise RuntimeError(
                            "File has been found earlier but can not been retrieved now. Logical bug"
                        )
                    else:
                        # found but let's give it some time to finish possible IO operations
                        self._check_access_granted(fname)
                ws = self.reduce()
        else:
            ws = self.reduce()
        self._wait_for_file = timeToWait
        return ws
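    # Illustrative sum-and-wait flow (a sketch; run numbers and times are assumptions):
    #
    #   rd.reducer.prop_man.sample_run = [11001, 11002, 11003]  # runs to sum
    #   rd.reducer.prop_man.sum_runs = True
    #   rd.wait_for_file = 300           # poll every 300 sec for runs still being written
    #   summed_ws = rd.sum_and_reduce()  # partial sums are cached until all runs are found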

    #

    def run_reduction(self):
        """" Reduces runs one by one or sum all them together and reduce after this

            if wait_for_file time is > 0, it will until  missing files appear on the
            data search path
        """
        try:
            _, r = funcinspect.lhs_info('both')
            out_ws_name = r[0]


        # no exception type(s) specified. Who knows what exception this internal procedure raises...
#pylint: disable=W0702
        except:
            out_ws_name = None

        # if this is not None, we want to run validation not reduction
        if self.validate_run_number:
            self.reducer.prop_man.log\
                ("**************************************************************************************",'warning')
            self.reducer.prop_man.log\
                ("**************************************************************************************",'warning')
            rez, mess = self.build_or_validate_result()
            if rez:
                self.reducer.prop_man.log("*** SUCCESS! {0}".format(mess))
                self.reducer.prop_man.log\
                    ("**************************************************************************************",'warning')

            else:
                self.reducer.prop_man.log(
                    "*** VALIDATION FAILED! {0}".format(mess))
                self.reducer.prop_man.log\
                    ("**************************************************************************************",'warning')
                raise RuntimeError("Validation against old data file failed")
            self.validate_run_number = None
            return rez, mess

        if self.reducer.sum_runs:
            # --------### sum runs provided ------------------------------------###
            if out_ws_name is None:
                self.sum_and_reduce()
                return None
            else:
                red_ws = self.sum_and_reduce()
                RenameWorkspace(InputWorkspace=red_ws,
                                OutputWorkspace=out_ws_name)
                return mtd[out_ws_name]
        else:
            # --------### reduce list of runs one by one ----------------------------###
            runfiles = PropertyManager.sample_run.get_run_file_list()
            if out_ws_name is None:
                for file_name in runfiles:
                    self.reduce(file_name)
                return None
            else:
                results = []
                nruns = len(runfiles)
                for num, file_name in enumerate(runfiles):
                    red_ws = self.reduce(file_name)
                    if isinstance(red_ws, list):
                        for ws in red_ws:
                            results.append(ws)
                    else:
                        if nruns == 1:
                            if red_ws.name() != out_ws_name:
                                RenameWorkspace(InputWorkspace=red_ws,
                                                OutputWorkspace=out_ws_name)
                            results.append(mtd[out_ws_name])
                        else:
                            OutWSName = '{0}#{1}of{2}'.format(
                                out_ws_name, num + 1, nruns)
                            if red_ws.name() != out_ws_name:
                                RenameWorkspace(InputWorkspace=red_ws,
                                                OutputWorkspace=OutWSName)
                            results.append(mtd[OutWSName])
                #end
                if len(results) == 1:
                    return results[0]
                else:
                    return results
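    # How the output workspace gets its name (a sketch): run_reduction() inspects the
    # left-hand side of the calling statement via funcinspect.lhs_info, so
    #
    #   ws = rd.run_reduction()   # the reduced workspace is renamed to 'ws'
    #   rd.run_reduction()        # no left-hand side: workspaces keep their default names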
    def test_abs_multirep_with_bkg_and_bleed(self):
        # create test workspace
        run_monitors=CreateSampleWorkspace(Function='Multiple Peaks', NumBanks=4, BankPixelWidth=1,\
                                            NumEvents=100000, XUnit='Energy', XMin=3, XMax=200, BinWidth=0.1)
        LoadInstrument(run_monitors,InstrumentName='MARI', RewriteSpectraMap=True)
        ConvertUnits(InputWorkspace='run_monitors', OutputWorkspace='run_monitors', Target='TOF')
        run_monitors = mtd['run_monitors']
        tof = run_monitors.dataX(3)
        tMin = tof[0]
        tMax = tof[-1]
        run = CreateSampleWorkspace( Function='Multiple Peaks',WorkspaceType='Event',NumBanks=8, BankPixelWidth=1,\
                                     NumEvents=100000, XUnit='TOF',xMin=tMin,xMax=tMax)
        LoadInstrument(run,InstrumentName='MARI', RewriteSpectraMap=True)
        AddSampleLog(run,LogName='gd_prtn_chrg',LogText='1.',LogType='Number')
        run.setMonitorWorkspace(run_monitors)

        # build "monovanadium"
        mono = CloneWorkspace(run)
        mono_monitors = CloneWorkspace(run_monitors)
        mono.setMonitorWorkspace(mono_monitors)

        # build "White-beam"
        wb_ws   = Rebin(run,Params=[tMin,1,tMax],PreserveEvents=False)

        # build "second run" to ensure repeated execution
        run2 = CloneWorkspace(run)
        run2_monitors = CloneWorkspace(run_monitors)
        run2.setMonitorWorkspace(run2_monitors)

        # Run multirep
        tReducer = DirectEnergyConversion(run.getInstrument())
        tReducer.prop_man.run_diagnostics=True 
        tReducer.hard_mask_file=None
        tReducer.map_file=None
        tReducer.prop_man.check_background = True
        tReducer.prop_man.background_range=[0.99*tMax,tMax]
        tReducer.prop_man.monovan_mapfile=None
        tReducer.save_format=None
        tReducer.prop_man.normalise_method='monitor-2'

        tReducer.prop_man.bleed = True
        tReducer.norm_mon_integration_range=[tMin,tMax]

        AddSampleLog(run,LogName='good_frames',LogText='1.',LogType='Number Series')
        result = tReducer.convert_to_energy(wb_ws,run,[67.,122.],[-2,0.02,0.8],None,mono)

        self.assertEqual(len(result),2)

        ws1=result[0]
        self.assertEqual(ws1.getAxis(0).getUnit().unitID(),'DeltaE')
        x = ws1.readX(0)
        self.assertAlmostEqual(x[0],-2*67.)
        self.assertAlmostEqual(x[-1],0.8*67.)

        ws2=result[1]
        self.assertEqual(ws2.getAxis(0).getUnit().unitID(),'DeltaE')
        x = ws2.readX(0)
        self.assertAlmostEqual(x[0],-2*122.)
        self.assertAlmostEqual(x[-1],0.8*122.)

        # test another ws
        # rename samples from previous workspace to avoid deleting them on current run
        for ind,item in enumerate(result):
            result[ind]=RenameWorkspace(item,OutputWorkspace='SampleRez#'+str(ind))
        #
        AddSampleLog(run2,LogName='goodfrm',LogText='1',LogType='Number')
        result2 = tReducer.convert_to_energy(None,run2)

        rez = CompareWorkspaces(result[0],result2[0])
        self.assertTrue(rez[0])
        rez = CompareWorkspaces(result[1],result2[1])
        self.assertTrue(rez[0])
class ReductionWrapper(object):
    """ Abstract class provides interface to direct inelastic reduction 
        allowing it to be run  from Mantid, web services, or system tests 
        using the same interface and the same run file placed in different 
        locations.
    """ 
    class var_holder(object):
        """ A simple wrapper class to keep web variables"""
        def __init__(self):
            self.standard_vars = None
            self.advanced_vars = None
            pass

    def __init__(self,instrumentName,web_var=None):
      """ sets properties defaults for the instrument with Name 
          and define if wrapper runs from web services or not
      """
      # internal variable, indicating if we should try to wait for input files to appear
      self._wait_for_file=False

      # The variables which are set up from web interface or to be exported to 
      # web interface
      if web_var: 
        self._run_from_web = True
        self._wvs = web_var
      else:
        self._run_from_web = False
        self._wvs = ReductionWrapper.var_holder()
      # Initialize reduced for given instrument
      self.reducer = DirectEnergyConversion(instrumentName)

      self._validation_fname=None
#
    def get_validation_file_name(self,ReferenceFile=None):
      """ function provides name of the file with mantid
          workspace reduced earlier and which should be validated 
          against results of current reduction

          Should be overloaded to return real file name for particular
          reduction
      """ 
      if ReferenceFile:
          self._validation_fname = ReferenceFile
      return self._validation_fname

    @property
    def wait_for_file(self):
        """ If this variable set to positive value, this value
            is interpreted as time to wait until check for specified run file 
            if this file have not been find immediately. 

            if this variable is 0 or false and the the file have not been found,
            reduction will fail
        """ 
        return self._wait_for_file
    @wait_for_file.setter
    def wait_for_file(self,value):
        if value>0:
            self._wait_for_file = value
        else:
            self._wait_for_file = False
#
    def save_web_variables(self,FileName=None):
        """ Method to write simple and advanced properties and help 
            information  into dictionary, to use by web reduction
            interface

            If no file is provided, reduce_var.py file will be written 
            to 

        """
        if not FileName:
            FileName = 'reduce_vars.py'
       
        f=open(FileName,'w')
        f.write("standard_vars = {\n")
        str_wrapper = '         '
        for key,val in self._wvs.standard_vars.iteritems():
            if isinstance(val,str):
                row = "{0}\'{1}\':\'{2}\'".format(str_wrapper,key,val)
            else:
                row = "{0}\'{1}\':{2}".format(str_wrapper,key,val)
            f.write(row)
            str_wrapper=',\n         '
        f.write("\n}\nadvanced_vars={\n")

        str_wrapper='         '
        for key,val in self._wvs.advanced_vars.iteritems():
            if isinstance(val,str):
                row = "{0}\'{1}\':\'{2}\'".format(str_wrapper,key,val)
            else:
                row = "{0}\'{1}\':{2}".format(str_wrapper,key,val)
            f.write(row)
            str_wrapper=',\n        '
        f.write("\n}\n")
        f.close()

#
#   
    def validate_result(self,build_validation=False,Error=1.e-3,ToleranceRelErr=True):
        """ Overload this using build_or_validate_result to have possibility to run or validate result """ 
        return True

    def build_or_validate_result(self,sample_run,validationFile,build_validation=False,Error=1.e-3,ToleranceRelErr=True):
        """ Method validates results of the reduction against reference file provided
            by get_validation_file_name() method 
            
            At the moment, get_validation_file_name method should return the name of a file,
            where workspace sample reduced workspace with default properties 
            is stored. 
            CheckWorkspaceMatch method is applied to verify if current reduced workspace is 
            equivalent to the workspace, stored in the reference file. 
        """

        if not build_validation:
           if validationFile:
              sample = Load(validationFile)
           else:
              build_validation=True


        # just in case, to be sure
        current_web_state = self._run_from_web
        current_wait_state= self.wait_for_file
        # disable wait for input and 
        self._run_from_web = False
        self.wait_for_file = False
        #
        self.def_advanced_properties()
        self.def_main_properties()
        #
        self.reducer.sample_run = sample_run
        self.reducer.prop_man.save_format=None

        reduced = self.reduce()

        if build_validation:
            if validationFile:
               result_name = os.path.splitext(validationFile)[0]
            else:
               result_name = self.reducer.prop_man.save_file_name
            self.reducer.prop_man.log("*** Saving validation file with name: {0}.nxs".format(result_name),'notice')
            SaveNexus(reduced,Filename=result_name+'.nxs')
            return True,'Created validation file {0}.nxs'.format(result_name)
        else:
            result = CheckWorkspacesMatch(Workspace1=sample,Workspace2=reduced,
                                      Tolerance=Error,CheckSample=False,
                                      CheckInstrument=False,ToleranceRelErr=ToleranceRelErr)

        self.wait_for_file = current_wait_state
        self._run_from_web = current_web_state 
        if result == 'Success!':
            return True,'Reference file and reduced workspace are equivalent'
        else:
            return False,result

    @abstractmethod
    def def_main_properties(self):
        """ Define properties which considered to be main properties changeable by user
            
            Should be overwritten by special reduction and decorated with  @MainProperties decorator. 

            Should return dictionary with key are the properties names and values -- the default 
            values these properties should have.
        """ 
        raise NotImplementedError('def_main_properties  has to be implemented')
    @abstractmethod
    def def_advanced_properties(self):
        """ Define properties which considered to be advanced but still changeable by instrument scientist or advanced user
            
            Should be overwritten by special reduction and decorated with  @AdvancedProperties decorator. 

            Should return dictionary with key are the properties names and values -- the default 
            values these properties should have.
        """ 

        raise NotImplementedError('def_advanced_properties  has to be implemented')


    def reduce(self,input_file=None,output_directory=None):
        """ The method performs all main reduction operations over 
            single run file
            
            Wrap it into @iliad wrapper to switch input for 
            reduction properties between script and web variables
        """ 
        if input_file:
           self.reducer.sample_run = input_file

        timeToWait = self._wait_for_file
        if timeToWait:
            file = PropertyManager.sample_run.find_file(be_quet=True)
            while file.find('ERROR:')>=0:
                file_hint,fext = PropertyManager.sample_run.file_hint()
                self.reducer.prop_man.log("*** Waiting {0} sec for file {1} to appear on the data search path"\
                    .format(timeToWait,file_hint),'notice')
                Pause(timeToWait)
                file = PropertyManager.sample_run.find_file(be_quet=True)
            ws = self.reducer.convert_to_energy(None,input_file)

        else:
            ws = self.reducer.convert_to_energy(None,input_file)

        return ws