Example 1
    def test_heterogeneous_bin(self):
        run_algorithm('CreateMDWorkspace', Dimensions='3',Extents='0,10,0,10,0,10',Names='x,y,z',Units='m,m,m',SplitInto='10',
                      MaxRecursionDepth='1',OutputWorkspace='mdwHW')
        #see BinMDTest::testExecLagreRegularSignal for C++ version of this test
        run_algorithm('FakeMDEventData', InputWorkspace="mdwHW",  UniformParams="-1000,0.5,1,0.5,1,0.5,1")
        SH = mtd['mdwHW']
        nEvents = SH.getNPoints()
        self.assertEqual(nEvents, 1000)
        run_algorithm('BinMD',InputWorkspace="mdwHW", OutputWorkspace="BH", AxisAligned=True, AlignedDim0="x,0,10,20", AlignedDim1="y,0,10,5",
                      AlignedDim2="z,0,10,40", IterateEvents="1", Parallel="0")
        BH = mtd['BH']
        nEvents = BH.getNEvents()
        self.assertEqual(nEvents, 1000)
        signal = BH.getSignalArray()
        expected = (long(20), long(5), long(40))
        shape = signal.shape
        self.assertEqual(shape,expected)

        for i in range(0,expected[1]):
            self.assertEqual(signal[1,i,2],2)
            self.assertEqual(signal[2,i,1],0)


        self.assertEqual(BH.signalAt(3+20*(2+5*1)),signal[1,2,3])
        self.assertEqual(BH.signalAt(4+20*(3+5*2)),signal[2,3,4])
        mtd.remove('BH')
        mtd.remove('mdwHW')
Example 2
    def test_heterogeneous_bin(self):
        run_algorithm('CreateMDWorkspace', Dimensions='3',Extents='0,10,0,10,0,10',Names='x,y,z',Units='m,m,m',SplitInto='10',
                      MaxRecursionDepth='1',OutputWorkspace='mdwHW')
        #see BinMDTest::testExecLagreRegularSignal for C++ version of this test
        run_algorithm('FakeMDEventData', InputWorkspace="mdwHW",  UniformParams="-1000,0.5,1,0.5,1,0.5,1")
        SH = mtd['mdwHW']
        nEvents = SH.getNPoints()
        self.assertEqual(nEvents, 1000)
        run_algorithm('BinMD',InputWorkspace="mdwHW", OutputWorkspace="BH", AxisAligned=True, AlignedDim0="x,0,10,20", AlignedDim1="y,0,10,5",
                      AlignedDim2="z,0,10,40", IterateEvents="1", Parallel="0")
        BH = mtd['BH']
        nEvents = BH.getNEvents()
        self.assertEqual(nEvents, 1000)
        signal = BH.getSignalArray()
        expected = (40L, 5L, 20L)
        shape = signal.shape
        self.assertEqual(shape,expected)

        for i in range(0, expected[1]):
            self.assertEqual(signal[2,i,1], 2)
            self.assertEqual(signal[1,i,2], 0)


        self.assertEqual(BH.signalAt(3+20*(2+5*1)),signal[1,2,3])
        self.assertEqual(BH.signalAt(4+20*(3+5*2)),signal[2,3,4])
        mtd.remove('BH')
        mtd.remove('mdwHW')
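The last two assertions tie Mantid's linear bin index to numpy indexing. A minimal numpy-only sketch of that mapping, assuming the (nz, ny, nx) signal-array layout used in this example (the ordering has varied between Mantid versions, so treat it as illustrative only):

import numpy

nx, ny, nz = 20, 5, 40                       # bins along x, y, z (AlignedDim0..2)
signal = numpy.arange(nx * ny * nz).reshape(nz, ny, nx)

def linear_index(ix, iy, iz, nx=nx, ny=ny):
    # Mantid-style linear bin index: x varies fastest, z slowest
    return ix + nx * (iy + ny * iz)

# signalAt(linear_index) addresses the same bin as signal[iz, iy, ix]
assert signal.flat[linear_index(3, 2, 1)] == signal[1, 2, 3]
assert signal.flat[linear_index(4, 3, 2)] == signal[2, 3, 4]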
Example 3
    def test_importAll_creates_variable_in_current_global_dict_pointing_to_each_workspace(self):
        obj_names = mtd.getObjectNames()
        extra_names = ["ADSTest_test_1", "ADSTest_test_2", "ADSTest_test_3"]
        for name in extra_names:
            self._run_createws(name)
        obj_names += extra_names
        
        # Check no names are in globals
        for name in obj_names:
            self.assertFalse(name in locals())

        # Pull in variables
        mtd.importAll()
        # Are they in the local namespace
        for name in obj_names:
            self.assertTrue(name in locals())
        
        # Clean up
        for name in obj_names:
            try:
                del locals()[name]
            except KeyError:
                pass
        for name in extra_names:
            mtd.remove(name)
Example 4
    def test_importAll_creates_variable_in_current_global_dict_pointing_to_each_workspace(self):
        obj_names = mtd.getObjectNames()
        extra_names = ["ADSTest_test_1", "ADSTest_test_2", "ADSTest_test_3"]
        for name in extra_names:
            self._run_createws(name)
        obj_names += extra_names

        # Check no names are in globals
        for name in obj_names:
            self.assertFalse(name in locals())

        # Pull in variables
        mtd.importAll()
        # Are they in the local namespace
        for name in obj_names:
            self.assertTrue(name in locals())

        # Clean up
        for name in obj_names:
            try:
                del locals()[name]
            except KeyError:
                pass
        for name in extra_names:
            mtd.remove(name)
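A hedged sketch of how mtd.importAll() is typically used outside the test, assuming it injects one variable per ADS entry into the caller's global namespace (CreateSampleWorkspace is just an illustrative workspace-producing algorithm):

from mantid.simpleapi import CreateSampleWorkspace, mtd

CreateSampleWorkspace(OutputWorkspace="ADSTest_demo")   # register a workspace in the ADS
mtd.importAll()                                         # one variable per workspace name
print(ADSTest_demo.name())                              # the name now resolves to the workspace
mtd.remove("ADSTest_demo")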
Example 5
    def test_errorSquared_array_is_wrapped_in_read_only_numpy_array(self):
        run_algorithm('CreateMDHistoWorkspace', SignalInput='1,2,3,4,5,6,7,8,9',ErrorInput='1,1,1,1,1,1,1,1,1',
                      Dimensionality='2',Extents='-1,1,-1,1',NumberOfBins='3,3',Names='A,B',Units='U,T',OutputWorkspace='demo')
        testWS = mtd['demo']
        errors = testWS.getErrorSquaredArray()
        expected = numpy.array([[1,1,1],[1,1,1],[1,1,1]])
        self._verify_numpy_data(errors, expected)

        mtd.remove('demo')
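The _verify_numpy_data helper is not shown here; a minimal standalone equivalent of what the test name implies (values match and the wrapper refuses in-place writes) could look like this (hypothetical helper, not the real test-class method):

import numpy

def verify_readonly(arr, expected):
    assert numpy.array_equal(arr, expected)
    assert not arr.flags.writeable            # wrapped as a read-only view
    try:
        arr[0, 0] = 10.0
    except ValueError:
        pass                                  # writing must be rejected
    else:
        raise AssertionError("array should be read-only")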
Example 6
 def test_composed_bin(self):
     run_algorithm('BinMD',InputWorkspace="mdw", OutputWorkspace="BH", AxisAligned=True, AlignedDim0="x,0,10,20", AlignedDim1="y,0,10,1",
                    IterateEvents="1", Parallel="0")
     BH = mtd['BH']
     signal = BH.getSignalArray()
     expected = (long(20), long(1))
     shape = signal.shape
     self.assertEqual(shape,expected)
     mtd.remove('BH')
Example 7
 def test_setSignalAt_throws_if_index_is_invalid(self):
     run_algorithm('CreateMDHistoWorkspace', SignalInput='1,2,3,4,5,6,7,8,9',ErrorInput='1,1,1,1,1,1,1,1,1',
                   Dimensionality='2',Extents='-1,1,-1,1',NumberOfBins='3,3',Names='A,B',Units='U,T',OutputWorkspace='demo')
     testWS = mtd['demo']
     index = testWS.getLinearIndex(3, 3)
     self.assertRaises(ValueError, testWS.setSignalAt, index, 1.0)
     index = testWS.getLinearIndex(0, 3)
     self.assertRaises(ValueError, testWS.setSignalAt, index, 1.0)
     mtd.remove('demo')
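For contrast, a sketch of the valid-index path on the same kind of 3x3 workspace (assumes a 'demo' MDHistoWorkspace like the one created above is in the ADS):

testWS = mtd['demo']
index = testWS.getLinearIndex(1, 2)      # in range for a 3x3 histogram
testWS.setSignalAt(index, 42.0)
print(testWS.signalAt(index))            # -> 42.0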
Example 8
 def _cloneTestWorkspace(self, wsName=None):
     if not wsName:
         # Cannot use as a default parameter because 'self' is not known in the argument list.
         wsName = self._TEST_WS_NAME
     tempName = 'temp_testWS_'
     mtd.addOrReplace(tempName, self._testIN5WS)
     ws = CloneWorkspace(InputWorkspace=tempName, OutputWorkspace=wsName)
     mtd.remove(tempName)
     return ws
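A sketch of how a clone helper like this is typically used inside a test, so the cached _testIN5WS is never modified (the assertion shown is only illustrative):

 def test_number_of_histograms_is_preserved(self):
     ws = self._cloneTestWorkspace('clonedWS')
     try:
         self.assertEqual(ws.getNumberHistograms(),
                          self._testIN5WS.getNumberHistograms())
     finally:
         mtd.remove('clonedWS')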
Example 9
 def test_setSignalAt_throws_if_index_is_invalid(self):
     run_algorithm('CreateMDHistoWorkspace', SignalInput='1,2,3,4,5,6,7,8,9',ErrorInput='1,1,1,1,1,1,1,1,1',
                   Dimensionality='2',Extents='-1,1,-1,1',NumberOfBins='3,3',Names='A,B',Units='U,T',OutputWorkspace='demo')
     testWS = mtd['demo']
     index = testWS.getLinearIndex(3, 3)
     self.assertRaises(ValueError, testWS.setSignalAt, index, 1.0)
     index = testWS.getLinearIndex(0, 3)
     self.assertRaises(ValueError, testWS.setSignalAt, index, 1.0)
     mtd.remove('demo')
Example 10
    def test_errorSquared_array_is_wrapped_in_read_only_numpy_array(self):
        run_algorithm('CreateMDHistoWorkspace', SignalInput='1,2,3,4,5,6,7,8,9',ErrorInput='1,1,1,1,1,1,1,1,1',
                      Dimensionality='2',Extents='-1,1,-1,1',NumberOfBins='3,3',Names='A,B',Units='U,T',OutputWorkspace='demo')
        testWS = mtd['demo']
        errors = testWS.getErrorSquaredArray()
        expected = numpy.array([[1,1,1],[1,1,1],[1,1,1]])
        self._verify_numpy_data(errors, expected)

        mtd.remove('demo')
Example 11
 def test_composed_bin(self):
     run_algorithm('BinMD',InputWorkspace="mdw", OutputWorkspace="BH", AxisAligned=True, AlignedDim0="x,0,10,20", AlignedDim1="y,0,10,1",
                    IterateEvents="1", Parallel="0")
     BH = mtd['BH']
     signal = BH.getSignalArray()
     expected = (1L, 20L)
     shape = signal.shape
     self.assertEqual(shape,expected)
     mtd.remove('BH')
Example 12
 def _cloneTestWorkspace(self, wsName=None):
     if not wsName:
         # Cannot use as a default parameter because 'self' is not known in the argument list.
         wsName = self._TEST_WS_NAME
     tempName = 'temp_testWS_'
     mtd.addOrReplace(tempName, self._testIN5WS)
     ws = CloneWorkspace(InputWorkspace=tempName,
                         OutputWorkspace=wsName)
     mtd.remove(tempName)
     return ws
Example 13
    def __call__(self, vesuvio_input, iteration, verbose_output=False):
        vesuvio_output = VesuvioTOFFitOutput(
            lambda index: vesuvio_input.sample_data.getSpectrum(
                index).getSpectrumNo())

        if vesuvio_input.using_back_scattering_spectra:
            fit_profile_collection = self._mass_profile_collection.filter(
                ignore_hydrogen_filter)
        else:
            fit_profile_collection = self._mass_profile_collection

        all_mass_values = self._mass_profile_collection.masses
        fit_mass_values = fit_profile_collection.masses

        for index in range(vesuvio_input.spectra_number):
            self._fit_namer.set_index(index)
            all_profiles = ";".join(
                self._mass_profile_collection.functions(index))
            fit_profiles = ";".join(fit_profile_collection.functions(index))

            # Calculate pre-fit to retrieve parameter approximations for corrections
            prefit_result = self._prefit(vesuvio_input.sample_data, index,
                                         fit_mass_values, fit_profiles)

            # Calculate corrections
            corrections_result = self._corrections(
                vesuvio_input.sample_data, vesuvio_input.container_data, index,
                all_mass_values, all_profiles, prefit_result[1],
                verbose_output)
            # Calculate final fit
            fit_result = self._final_fit(corrections_result[-1],
                                         fit_mass_values, fit_profiles)
            # Update output with results from fit
            _update_output(vesuvio_output, prefit_result, corrections_result,
                           fit_result)

            # Clear ADS of intermediate workspaces and workspace group
            if verbose_output:
                UnGroupWorkspace(corrections_result[0])
                UnGroupWorkspace(corrections_result[1])
            mtd.remove(prefit_result[1].name())
            mtd.remove(fit_result[1].name())

        return vesuvio_output
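A small standalone illustration of the profile string passed to the fits above: functions(index) is expected to yield one function definition per mass, and the composite is simply their ';'-joined concatenation (the function definitions below are stand-ins, not output of the real collection):

profile_functions = [
    "name=GaussianComptonProfile,Mass=1.0079,Width=5.0",
    "name=GaussianComptonProfile,Mass=16.0,Width=10.0",
]
fit_profiles = ";".join(profile_functions)
# -> "name=GaussianComptonProfile,Mass=1.0079,Width=5.0;name=GaussianComptonProfile,Mass=16.0,Width=10.0"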
Example 14
    def __call__(self, vesuvio_input, iteration, verbose_output=False):
        vesuvio_output = VesuvioTOFFitOutput(lambda index:
                                             vesuvio_input.sample_data.getSpectrum(index).getSpectrumNo())

        if vesuvio_input.using_back_scattering_spectra:
            fit_profile_collection = self._mass_profile_collection.filter(ignore_hydrogen_filter)
        else:
            fit_profile_collection = self._mass_profile_collection

        all_mass_values = self._mass_profile_collection.masses
        fit_mass_values = fit_profile_collection.masses

        for index in range(vesuvio_input.spectra_number):
            self._fit_namer.set_index(index)
            all_profiles = ";".join(self._mass_profile_collection.functions(index))
            fit_profiles = ";".join(fit_profile_collection.functions(index))

            # Calculate pre-fit to retrieve parameter approximations for corrections
            prefit_result = self._prefit(vesuvio_input.sample_data, index, fit_mass_values, fit_profiles)

            # Calculate corrections
            corrections_result = self._corrections(vesuvio_input.sample_data, vesuvio_input.container_data, index,
                                                   all_mass_values, all_profiles, prefit_result[1], verbose_output)

            # Calculate final fit
            fit_result = self._final_fit(corrections_result[-1], fit_mass_values, fit_profiles)

            # Update output with results from fit
            _update_output(vesuvio_output, prefit_result, corrections_result, fit_result)

            # Clear ADS of intermediate workspaces and workspace group
            if verbose_output:
                UnGroupWorkspace(corrections_result[0])
                UnGroupWorkspace(corrections_result[1])
            mtd.remove(prefit_result[1].getName())
            mtd.remove(corrections_result[-1].getName())
            mtd.remove(fit_result[1].getName())

        return vesuvio_output
Example 15
    def PyExec(self):   
        
        import os
        import numpy
        import math
        from reduction.instruments.reflectometer import wks_utility
        
        from mantid import mtd
        #remove all previous workspaces
        list_mt = mtd.getObjectNames()
        for _mt in list_mt:
            if _mt.find('_scaled') != -1:
                mtd.remove(_mt)
            if _mt.find('_reflectivity') != -1:
                mtd.remove(_mt)
        from mantidsimple import mtd    

        bDebug = False
        if bDebug:
            print '====== Running in DEBUG mode ======='

        run_numbers = self.getProperty("RunNumbers")
        if bDebug:
            print 'run_numbers (before getSequenceRuns): ' 
            print str(run_numbers)
            print
        run_numbers = wks_utility.getSequenceRuns(run_numbers)
        if bDebug:
            print 'run_numbers (after getSequenceRuns): ' 
            print str(run_numbers)
            print
            
        for _run in run_numbers:
        
            # make sure we are working with an integer
            _run = int(_run)
        
            print '********* Working with run: ' + str(_run) + ' *********'

            #Pick a good workspace name
            ws_name = "refl%d" % _run
            ws_event_data = ws_name+"_evt"  

            try:
                data_file = FileFinder.findRuns("REF_L%d" %_run)[0]
                if bDebug:
                    print 'DEBUG: full file name is ' + data_file
            except RuntimeError:
                msg = "RefLReduction: could not find run %d\n" % _run
                msg += "Add your data folder to your User Data Directories in the File menu"
                if bDebug:
                    print 'DEBUG: file name could not be found !'
                raise RuntimeError(msg)
                
            if not mtd.workspaceExists(ws_event_data):
                LoadEventNexus(Filename=data_file, 
                               OutputWorkspace=ws_event_data)

            #retrieve list of metadata
            mt_run = mtd[ws_event_data].getRun()

            #run_title
            run_title = mt_run.getProperty('run_title').value
            _line = ' Run title: ' + run_title
            print _line
            
            #run_start
            run_start = mt_run.getProperty('run_start').value
            _line = ' Run start: ' + run_start
            print _line
            
            #duration
            duration_value = mt_run.getProperty('duration').value
            duration_units = mt_run.getProperty('duration').units
            _line = ' Duration: {0:.2f}'.format(duration_value)
            _line += ' ' + duration_units
            print _line
            
            #Lambda Requested
            lambda_request_value = mt_run.getProperty('LambdaRequest').value[0]
            lambda_request_units = mt_run.getProperty('LambdaRequest').units
            _line = ' Lambda requested: {0:.2f}'.format(lambda_request_value)
            _line += ' ' + lambda_request_units
            print _line
            
            #tthd
            tthd_value = mt_run.getProperty('tthd').value[0]
            tthd_units = mt_run.getProperty('tthd').units
            _line = ' tthd: {0:.4f}'.format(tthd_value)
            _line += ' ' + tthd_units
            print _line
           
            #thi
            thi_value = mt_run.getProperty('thi').value[0]
            thi_units = mt_run.getProperty('thi').units
            _line = ' thi: {0:.4f}'.format(thi_value)
            _line += ' ' + thi_units
            print _line
            
            #(tthd-thi)/2
            _cal = (float(tthd_value)-float(thi_value))/2.
            _line = ' (tthd-thi)/2: {0:.2f}'.format(_cal)
            _line += ' ' + thi_units
            print _line
            
            #ths
            ths_value = mt_run.getProperty('ths').value[0]
            ths_units = mt_run.getProperty('ths').units
            _line = ' ths: {0:.4f}'.format(ths_value)
            _line += ' ' + ths_units
            print _line
            
            #s1h
            s1h_value, s1h_units = wks_utility.getS1h(mtd[ws_event_data])
            _line = ' s1h: {0:.4f}'.format(s1h_value)
            _line += ' ' + s1h_units
            print _line
            
            #s2h
            s2h_value, s2h_units = wks_utility.getS2h(mtd[ws_event_data])
            _line = ' s2h: {0:.4f}'.format(s2h_value)
            _line += ' ' + s2h_units
            print _line
            
            #s1w
            s1w_value, s1w_units = wks_utility.getS1w(mtd[ws_event_data])
            _line = ' s1w: {0:.4f}'.format(s1w_value)
            _line += ' ' + s1w_units
            print _line
            
            #s2w
            s2w_value, s2w_units = wks_utility.getS2w(mtd[ws_event_data])
            _line = ' s2w: {0:.4f}'.format(s2w_value)
            _line += ' ' + s2w_units
            print _line

            print '********************************'
            print 
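The metadata block above repeats the same 'name: value units' pattern for every log; a small helper (hypothetical, not part of the algorithm) would compress it:

def print_log(run, name, fmt='{0:.4f}', index=None):
    prop = run.getProperty(name)
    value = prop.value if index is None else prop.value[index]
    print(' %s: %s %s' % (name, fmt.format(value), prop.units))

# e.g. print_log(mt_run, 'tthd', index=0) replaces the four-line block per log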
Example 16
    def PyExec(self):   
        
        import os
        import numpy
        import math
        from reduction.instruments.reflectometer import wks_utility
        
        from mantid import mtd
        #remove all previous workspaces
        list_mt = mtd.getObjectNames()
        for _mt in list_mt:
            if _mt.find('_scaled') != -1:
                mtd.remove(_mt)
            if _mt.find('_reflectivity') != -1:
                mtd.remove(_mt)
            
        from mantidsimple import mtd    

        bDebug = True
        if bDebug:
            print '====== Running in DEBUG mode ======='

        run_numbers = self.getProperty("RunNumbers")

        backSubMethod = 2   # 1 uses RefRoi, 2 uses own method

        mtd.sendLogMessage("RefLReduction: processing %s" % run_numbers)

        #run with normalization or not    
        NormFlag = self.getProperty("NormFlag")
        
        normalization_run = self.getProperty("NormalizationRunNumber")

        data_peak = self.getProperty("SignalPeakPixelRange")
        data_back = self.getProperty("SignalBackgroundPixelRange")

        # TOF range to consider
        TOFrangeFlag = self.getProperty("TofRangeFlag")
        if (TOFrangeFlag):
            TOFrange = self.getProperty("TOFRange") #microS
        else:
            TOFrange = [0, 200000]
            
        # Steps for TOF rebin
        TOFsteps = 100.0

        #use now a global q binning (user does not have control over it)
        #q_min = 0.005
        #q_step = -0.01

        # Q binning for output distribution
        q_min = self.getProperty("QMin")
        q_step = self.getProperty("QStep")
        if (q_step > 0):
            q_step = -q_step
        
        #dimension of the detector (256 by 304 pixels)
        maxX = 304
        maxY = 256
                
        # Due to the frame effect, it's sometimes necessary to narrow the range
        # over which we sum the pixels along the low-resolution axis
        # Parameter
        data_low_res_flag = self.getProperty("LowResDataAxisPixelRangeFlag")
        if data_low_res_flag:
            data_low_res = self.getProperty("LowResDataAxisPixelRange")
        else:
            data_low_res = [0,maxX-1]

        norm_low_res_flag = self.getProperty("LowResNormAxisPixelRangeFlag")
        if norm_low_res_flag:
            norm_low_res = self.getProperty("LowResNormAxisPixelRange")
        else:
            norm_low_res = [0,maxX-1]
                
        h = 6.626e-34  #m^2 kg s^-1
        m = 1.675e-27     #kg

        norm_back = self.getProperty("NormBackgroundPixelRange")
        norm_peak = self.getProperty("NormPeakPixelRange")

        subtract_data_bck = self.getProperty("SubtractSignalBackground")
        subtract_norm_bck = self.getProperty("SubtractNormBackground")

        #name of the sfCalculator txt file
#        slitsValuePrecision = 0.01       #precision of slits = 10% 
        slitsValuePrecision = sfCalculator.PRECISION
        sfFile = self.getProperty("ScalingFactorFile")
        incidentMedium = self.getProperty("IncidentMediumSelected")
        slitsWidthFlag = self.getProperty("SlitsWidthFlag")
                
        # Pick a good workspace name
        ws_name = "refl%d" % run_numbers[0]
        ws_event_data = ws_name+"_evt"  
        
        # Load the data into its workspace
        allow_multiple = True        
        
        if len(run_numbers)>1 and allow_multiple:

            _list = []
            for _run in run_numbers:
                _list.append(str(_run))
            list_run = ','.join(_list)
            print '** Working with data runs: ' + str(list_run)
            
            for _run in run_numbers:

                ##############################################################
                # Find full path to event NeXus data file
                try:
                    data_file = FileFinder.findRuns("REF_L%d" %_run)[0]
                except RuntimeError:
                    msg = "RefLReduction: could not find run %d\n" % _run
                    msg += "Add your data folder to your User Data Directories in the File menu"
                    raise RuntimeError(msg)
                
                if not mtd.workspaceExists(ws_event_data):
                    LoadEventNexus(Filename=data_file, 
                                   OutputWorkspace=ws_event_data)
                else:
                    LoadEventNexus(Filename=data_file, 
                                   OutputWorkspace='tmp')
                    mt1 = mtd[ws_event_data]
                    mt2 = mtd['tmp']
                    Plus(LHSWorkspace=ws_event_data,
                         RHSWorkspace='tmp',
                         OutputWorkspace=ws_event_data)
        else:

            print '** Working with data run: ' + str(run_numbers[0])
            
            try:
                data_file = FileFinder.findRuns("REF_L%d" %run_numbers[0])[0]
            except RuntimeError:
                msg = "RefLReduction: could not find run %d\n" %run_numbers[0]
                msg += "Add your data folder to your User Data Directories in the File menu"
                raise RuntimeError(msg)

            if not mtd.workspaceExists(ws_event_data):
                LoadEventNexus(Filename=data_file, 
                               OutputWorkspace=ws_event_data)
        
        # Get metadata
        mt_run = mtd[ws_event_data].getRun()
        ##get angles value
        thi_value = mt_run.getProperty('thi').value[0]
        thi_units = mt_run.getProperty('thi').units
        tthd_value = mt_run.getProperty('tthd').value[0]
        tthd_units = mt_run.getProperty('tthd').units
        thi_rad = wks_utility.angleUnitConversion(value=thi_value,
                                                  from_units=thi_units,
                                                  to_units='rad')
        tthd_rad = wks_utility.angleUnitConversion(value=tthd_value,
                                                   from_units=tthd_units,
                                                   to_units='rad')

        # Rebin data (x-axis is in TOF)
        print '-> Rebin'
        ws_histo_data = "_"+ws_name+"_histo"
        Rebin(InputWorkspace=ws_event_data, 
              OutputWorkspace=ws_histo_data, 
              Params=[TOFrange[0], 
                      TOFsteps, 
                      TOFrange[1]],
              PreserveEvents=True)
                
        # Keep only range of TOF of interest
        print '-> Crop TOF range'
        CropWorkspace(ws_histo_data,ws_histo_data,XMin=TOFrange[0], 
                      XMax=TOFrange[1])

        # Normalized by Current (proton charge)
        print '-> Normalize by proton charge'
        NormaliseByCurrent(InputWorkspace=ws_histo_data, 
                           OutputWorkspace=ws_histo_data)
    
        # Calculation of the central pixel (using weighted average)
        pixelXtof_data = wks_utility.getPixelXTOF(mtd[ws_histo_data], 
                                                  maxX=maxX, maxY=maxY)
        pixelXtof_1d = pixelXtof_data.sum(axis=1)
        
        # Keep only range of pixels
        pixelXtof_roi = pixelXtof_1d[data_peak[0]:data_peak[1]]
        
        sz = pixelXtof_roi.size
        _num = 0
        _den = 0
        start_pixel = data_peak[0]
        for i in range(sz):
            _num += (start_pixel * pixelXtof_roi[i])
            start_pixel = start_pixel + 1
            _den += pixelXtof_roi[i]
        data_cpix = _num / _den    
        print '-> Central pixel is {0:.1f}'.format(data_cpix)
        
        # Retrieve geometry of instrument
        # Sample-to-detector distance
        sample = mtd[ws_event_data].getInstrument().getSample()
        source = mtd[ws_event_data].getInstrument().getSource()
        dSM = sample.getDistance(source)
        # Create array of distances pixel->sample
        dPS_array = numpy.zeros((maxY, maxX))
        for x in range(maxX):
            for y in range(maxY):
                _index = maxY * x + y
                detector = mtd[ws_event_data].getDetector(_index)
                dPS_array[y, x] = sample.getDistance(detector)
        # Array of distances pixel->source
        dMP_array = dPS_array + dSM
        # Distance sample->center of detector
        dSD = dPS_array[maxY / 2, maxX / 2]
        # Distance source->center of detector        
        dMD = dSD + dSM

        ws_data = '_' + ws_name + '_DataWks'

        # Even if the user selects background subtraction,
        # make sure there really is a background region (peak range != background range)

        _LfromPx = data_back[0]
        _LtoPx = data_peak[0]
        _RfromPx = data_peak[1]
        _RtoPx = data_back[1]

        if ((_LfromPx == _LtoPx) and (_RfromPx == _RtoPx)):
            subtract_data_bck = False
        
        if (subtract_data_bck and (backSubMethod == 1)):

            print '-> subtract background'
            ConvertToMatrixWorkspace(InputWorkspace=ws_histo_data,
                                     OutputWorkspace=ws_histo_data)
            
            ws_data_bck = '_' + ws_name + '_DataBckWks'
            
            bBackLeft = False
            if (data_back[0] < (data_peak[0]-1)):

                bBackLeft = True
                ws_data_bck_1 = ws_data_bck + "_1"
                RefRoi(InputWorkspace=ws_histo_data,
                       OutputWorkspace=ws_data_bck_1,
                       NXPixel=maxX,
                       NYPixel=maxY,
                       ConvertToQ=False,
                       IntegrateY=False,
                       SumPixels=True,
                       XPixelMin=data_low_res[0],
                       XPixelMax=data_low_res[1],
                       YPixelMin=data_back[0],
                       YPixelMax=data_peak[0]-1,
                       NormalizeSum=True)

                ws_data_bck_1_rebin = ws_data_bck_1 + '_rebin'
                RebinToWorkspace(WorkspaceToRebin=ws_data_bck_1, 
                                 WorkspaceToMatch=ws_histo_data, 
                                 OutputWorkspace=ws_data_bck_1_rebin)

            bBackRight = False
            if ((data_peak[1]+1) < data_back[1]):

                bBackRight = True
                ws_data_bck_2 = ws_data_bck + "_2"
                RefRoi(InputWorkspace=ws_histo_data,
                       OutputWorkspace=ws_data_bck_2,
                       NXPixel=maxX,
                       NYPixel=maxY,
                       ConvertToQ=False,
                       IntegrateY=False,
                       SumPixels=True,
                       XPixelMin=data_low_res[0],
                       XPixelMax=data_low_res[1],
                       YPixelMin=data_peak[1]+1,
                       YPixelMax=data_back[1],
                       NormalizeSum=True)
            
                ws_data_bck_2_rebin = ws_data_bck_2 + '_rebin'
                RebinToWorkspace(WorkspaceToRebin=ws_data_bck_2, 
                                 WorkspaceToMatch=ws_histo_data, 
                                 OutputWorkspace=ws_data_bck_2_rebin)

            if (bBackLeft and bBackRight):
            
                Plus(RHSWorkspace=ws_data_bck_1_rebin,
                     LHSWorkspace=ws_data_bck_2_rebin,
                     OutputWorkspace=ws_data_bck)
                Scale(InputWorkspace=ws_data_bck,
                      OutputWorkspace=ws_data_bck+'_scale',
                      Factor=0.5,
                      Operation="Multiply")
                
                Minus(LHSWorkspace=ws_histo_data, 
                      RHSWorkspace=ws_data_bck+'_scale', 
                      OutputWorkspace=ws_data)

                if mtd.workspaceExists(ws_data_bck+'_scale'):
                    mtd.deleteWorkspace(ws_data_bck+'_scale')
                
                if mtd.workspaceExists(ws_data_bck):
                    mtd.deleteWorkspace(ws_data_bck)
                
                if mtd.workspaceExists(ws_data_bck_1_rebin):
                    mtd.deleteWorkspace(ws_data_bck_1_rebin)
                
                if mtd.workspaceExists(ws_data_bck_2_rebin):
                    mtd.deleteWorkspace(ws_data_bck_2_rebin)
                
                if mtd.workspaceExists(ws_data_bck_1):
                    mtd.deleteWorkspace(ws_data_bck_1)
                
                if mtd.workspaceExists(ws_data_bck_2):
                    mtd.deleteWorkspace(ws_data_bck_2)
                
                if mtd.workspaceExists(ws_histo_data):
                    mtd.deleteWorkspace(ws_histo_data)

            elif (bBackLeft):
                
                Minus(LHSWorkspace=ws_histo_data,
                      RHSWorkspace=ws_data_bck_1_rebin,
                      OutputWorkspace=ws_data)
                
                if mtd.workspaceExists(ws_data_bck_1_rebin):
                    mtd.deleteWorkspace(ws_data_bck_1_rebin)
                
                if mtd.workspaceExists(ws_data_bck_1):
                    mtd.deleteWorkspace(ws_data_bck_1)
                
            elif (bBackRight):
                
                Minus(LHSWorkspace=ws_histo_data,
                      RHSWorkspace=ws_data_bck_2_rebin,
                      OutputWorkspace=ws_data)

                if mtd.workspaceExists(ws_data_bck_2_rebin):
                    mtd.deleteWorkspace(ws_data_bck_2_rebin)
                
                if mtd.workspaceExists(ws_data_bck_2):
                    mtd.deleteWorkspace(ws_data_bck_2)

            # cleanup (remove all negative values)
            ResetNegatives(InputWorkspace=ws_data,
                           OutputWorkspace=ws_data,
                           AddMinimum=0)
            
            if mtd.workspaceExists(ws_histo_data):
                mtd.deleteWorkspace(ws_histo_data)

        if (subtract_data_bck and (backSubMethod == 2)):
                
            #integrate over the x axis in the low axis range specified
            wks_utility.createIntegratedWorkspace(mtd[ws_histo_data], 
                                                  ws_histo_data+'_1D',
                                                  fromXpixel=data_low_res[0],
                                                  toXpixel=data_low_res[1],
                                                  fromYpixel=0,
                                                  toYpixel=255,
                                                  maxX=maxX,
                                                  maxY=maxY)

            #for each TOF, get the average counts over the two
            #background regions (top and bottom)
            _mt = mtd[ws_histo_data+'_1D']
            _x_axis = _mt.readX(0)[:]
            _nbr_tof = len(_x_axis)
            _tof_range = range(_nbr_tof-1)
            _back_array = zeros(_nbr_tof-1)
            _back_array_error = zeros(_nbr_tof-1) 
            
            #work on left side
            _LfromPx = data_back[0]
            _LtoPx = data_peak[0]
            #work on right side
            _RfromPx = data_peak[1]
            _RtoPx = data_back[1]

            bLeftBack = False            
            if (_LfromPx < _LtoPx):
                _Larray = arange(_LtoPx - _LfromPx) + _LfromPx
                bLeftBack = True
            
            bRightBack = False
            if (_RfromPx < _RtoPx):
                _Rarray = arange(_RtoPx - _RfromPx) + _RfromPx
                bRightBack = True
            
            if (bLeftBack and bRightBack):
                _y_px_range = numpy.append(_Larray,_Rarray)
#                _y_px_range = _y_px_range.flatten()
            else:
                if (bLeftBack):
                    _y_px_range = _Larray
                else: 
                    _y_px_range = _Rarray

            for i in _tof_range:
                _sum = 0.
                _sum_error = 0.
                _pts_summed = 0.
                
                _val = 0.
                _err = 0.
                for j in _y_px_range:
                    _val = float(_mt.readY(int(j))[int(i)])
                    _err = float(_mt.readE(int(j))[int(i)])
                    if (_val != 0 and _err !=0):
                        _new_val = float(_val / _err)
                        _new_err = 1./_err
                        _sum += _new_val
                        _sum_error += _new_err

                if (_val !=0. and _err !=0.):                                        
                    _back_array[i] = float(_sum / _sum_error)
                    _back_array_error[i] = float(1./ _sum_error)

            # subtract this number from the rest
            CreateWorkspace(OutputWorkspace='background',
                            DataX=_x_axis,
                            DataY=_back_array,
                            DataE=_back_array_error,
                            UnitX="TOF",
                            ParentWorkspace=mtd[ws_histo_data],
                            NSpec=1)

            #recreate workspace at the end                
            mt1 = mtd[ws_histo_data+'_1D']
            mt2 = mtd['background']
                                    
            Minus(LHSWorkspace=ws_histo_data+'_1D',
                  RHSWorkspace='background',
                  OutputWorkspace=ws_data)

            ResetNegatives(InputWorkspace=ws_data,
                           OutputWorkspace=ws_data,
                           AddMinimum=0)

            if mtd.workspaceExists(ws_histo_data+'_1D'):
                mtd.deleteWorkspace(ws_histo_data+'_1D')

#            SumSpectra(InputWorkspace=ws_data, 
#                       OutputWorkspace='wks_after_back_subtraction_1d')
        
        if (not(subtract_data_bck)):

            wks_utility.createIntegratedWorkspace(mtd[ws_histo_data], 
                                                  ws_data,
                                                  fromXpixel=data_low_res[0],
                                                  toXpixel=data_low_res[1],
                                                  fromYpixel=data_peak[0],
                                                  toYpixel=data_peak[1],
                                                  maxX=maxX,
                                                  maxY=maxY)     
            ConvertToMatrixWorkspace(InputWorkspace=ws_data,
                                     OutputWorkspace=ws_data)
            
#            ConvertToMatrixWorkspace(InputWorkspace=ws_data,
#                                     OutputWorkspace=ws_data)

            if mtd.workspaceExists(ws_histo_data):
                mtd.deleteWorkspace(ws_histo_data)


        if (NormFlag):

            print '-> normalization file is ' + str(normalization_run)
            # Find full path to event NeXus data file
            try:
                norm_file = FileFinder.findRuns("REF_L%d" %normalization_run)[0]
            except RuntimeError:
                msg = "RefLReduction: could not find run %d\n" %normalization_run
                msg += "Add your data folder to your User Data Directories in the File menu"
                raise RuntimeError(msg)
            
            #load normalization file
            ws_name = "_normalization_refl%d" % normalization_run
            ws_norm_event_data = ws_name+"_evt"  
            ws_norm_histo_data = ws_name+"_histo"  

            if not mtd.workspaceExists(ws_norm_event_data):
                LoadEventNexus(Filename=norm_file, 
                               OutputWorkspace=ws_norm_event_data)
 
            # Rebin data
            print '-> rebin normalization'
            Rebin(InputWorkspace=ws_norm_event_data, 
                  OutputWorkspace=ws_norm_histo_data, 
                  Params=[TOFrange[0], 
                          TOFsteps, 
                          TOFrange[1]])
 
            # Keep only range of TOF of interest
            print '-> Crop TOF range'
            CropWorkspace(InputWorkspace=ws_norm_histo_data,
                          OutputWorkspace=ws_norm_histo_data,
                          XMin=TOFrange[0], 
                          XMax=TOFrange[1])
            
            # Normalized by Current (proton charge)
            print '-> normalize direct beam by current'
            NormaliseByCurrent(InputWorkspace=ws_norm_histo_data, 
                               OutputWorkspace=ws_norm_histo_data)

            ws_data_bck = '_' + ws_name + '_NormBckWks'
#            ws_norm_rebinned = '_' + ws_name + '_NormRebinnedWks'
            ws_norm_rebinned = ws_name + '_NormRebinnedWks'
            if (subtract_norm_bck and (backSubMethod == 1)):
                
                print '-> subtract background from direct beam'
                ConvertToMatrixWorkspace(InputWorkspace=ws_norm_histo_data,
                                         OutputWorkspace=ws_norm_histo_data)
                            
                ws_norm_bck = '_' + ws_name + '_NormBckWks'
                
                bBackLeft = False
                if (norm_back[0] < (norm_peak[0]-1)):
            
                    bBackLeft = True
                    ws_norm_bck_1 = ws_norm_bck + "_1"
                    RefRoi(InputWorkspace=ws_norm_histo_data,
                           OutputWorkspace=ws_norm_bck_1,
                           NXPixel=maxX,
                           NYPixel=maxY,
                           ConvertToQ=False,
                           IntegrateY=False,
                           SumPixels=True,
                           XPixelMin=norm_low_res[0],
                           XPixelMax=norm_low_res[1],
                           YPixelMin=norm_back[0],
                           YPixelMax=norm_peak[0]-1,
                           NormalizeSum=True)
                           
                    ws_norm_bck_1_rebin = ws_norm_bck_1 + '_rebin'
                    RebinToWorkspace(WorkspaceToRebin=ws_norm_bck_1, 
                                     WorkspaceToMatch=ws_norm_histo_data, 
                                     OutputWorkspace=ws_norm_bck_1_rebin)


                bBackRight = False
                if ((norm_peak[1]+1) < norm_back[1]):

                    bBackRight = True
                    ws_norm_bck_2 = ws_norm_bck + "_2"
                    RefRoi(InputWorkspace=ws_norm_histo_data,
                           OutputWorkspace=ws_norm_bck_2,
                           NXPixel=maxX,
                           NYPixel=maxY,
                           ConvertToQ=False,
                           IntegrateY=False,
                           SumPixels=True,
                           XPixelMin=norm_low_res[0],
                           XPixelMax=norm_low_res[1],
                           YPixelMin=norm_peak[1]+1,
                           YPixelMax=norm_back[1],
                           NormalizeSum=True)
            
                    ws_norm_bck_2_rebin = ws_norm_bck_2 + '_rebin'
                    RebinToWorkspace(WorkspaceToRebin=ws_norm_bck_2, 
                                     WorkspaceToMatch=ws_norm_histo_data, 
                                     OutputWorkspace=ws_norm_bck_2_rebin)

                if (bBackLeft and bBackRight):

                    Plus(RHSWorkspace=ws_norm_bck_1_rebin,
                         LHSWorkspace=ws_norm_bck_2_rebin,
                         OutputWorkspace=ws_norm_bck)
                    Scale(InputWorkspace=ws_norm_bck,
                          OutputWorkspace=ws_norm_bck+'_scale',
                          Factor=0.5,
                          Operation="Multiply")

                    Minus(LHSWorkspace=ws_norm_histo_data, 
                          RHSWorkspace=ws_norm_bck+'_scale', 
                          OutputWorkspace=ws_norm_rebinned)
                    
                    if mtd.workspaceExists(ws_norm_bck_1_rebin):
                        mtd.deleteWorkspace(ws_norm_bck_1_rebin)
                    
                    if mtd.workspaceExists(ws_norm_bck_2_rebin):
                        mtd.deleteWorkspace(ws_norm_bck_2_rebin)
                    
                    if mtd.workspaceExists(ws_norm_bck_1):
                        mtd.deleteWorkspace(ws_norm_bck_1)
                    
                    if mtd.workspaceExists(ws_norm_bck_2):
                        mtd.deleteWorkspace(ws_norm_bck_2)
                    
                    if mtd.workspaceExists(ws_norm_histo_data):
                        mtd.deleteWorkspace(ws_norm_histo_data)
                    
                    if mtd.workspaceExists(ws_norm_bck+'_scale'):
                        mtd.deleteWorkspace(ws_norm_bck+'_scale')

                elif (bBackLeft):
                    
                    Minus(LHSWorkspace=ws_norm_histo_data,
                          RHSWorkspace=ws_norm_bck_1_rebin,
                          OutputWorkspace=ws_norm_rebinned)
                    
                    if mtd.workspaceExists(ws_norm_bck_1_rebin):
                        mtd.deleteWorkspace(ws_norm_bck_1_rebin)
                    
                    if mtd.workspaceExists(ws_norm_bck_1):
                        mtd.deleteWorkspace(ws_norm_bck_1)
                        
                    if mtd.workspaceExists(ws_norm_histo_data):
                        mtd.deleteWorkspace(ws_norm_histo_data)

                elif (bBackRight):
                    
                    Minus(LHSWorkspace=ws_norm_histo_data,
                          RHSWorkspace=ws_norm_bck_2_rebin,
                          OutputWorkspace=ws_norm_rebinned)
                    
                    if mtd.workspaceExists(ws_norm_bck_2_rebin):
                        mtd.deleteWorkspace(ws_norm_bck_2_rebin)
                        
                    if mtd.workspaceExists(ws_norm_bck_2):
                        mtd.deleteWorkspace(ws_norm_bck_2)
                    
                    if mtd.workspaceExists(ws_norm_histo_data):
                        mtd.deleteWorkspace(ws_norm_histo_data)

                
                # set all negative entries to zero
                ResetNegatives(InputWorkspace=ws_norm_rebinned,
                               OutputWorkspace=ws_norm_rebinned,
                               AddMinimum=0)

                wks_utility.createIntegratedWorkspace(mtd[ws_norm_rebinned], 
                                                      ws_norm_rebinned,
                                                      fromXpixel=norm_low_res[0],
                                                      toXpixel=norm_low_res[1],
                                                      fromYpixel=norm_peak[0],
                                                      toYpixel=norm_peak[1],
                                                      maxX=maxX,
                                                      maxY=maxY,
                                                      bCleaning=True)

            if (subtract_norm_bck and (backSubMethod == 2)):
                     
                #integrate over the x axis in the low axis range specified
                wks_utility.createIntegratedWorkspace(mtd[ws_norm_histo_data], 
                                                      ws_norm_histo_data+'_1D',
                                                      fromXpixel=norm_low_res[0],
                                                      toXpixel=norm_low_res[1],
                                                      fromYpixel=0,
                                                      toYpixel=255,
                                                      maxX=maxX,
                                                      maxY=maxY)

                #for each TOF, get the average counts over the two
                #background regions (top and bottom)
                _mt = mtd[ws_norm_histo_data+'_1D']
                _x_axis = _mt.readX(0)[:]
                _nbr_tof = len(_x_axis)
                _tof_range = range(_nbr_tof-1)
                _back_array = zeros(_nbr_tof-1)
                _back_array_error = zeros(_nbr_tof-1) 
            
                #work on left side
                _LfromPx = norm_back[0]
                _LtoPx = norm_peak[0]
                #work on right side
                _RfromPx = norm_peak[1]
                _RtoPx = norm_back[1]

                bLeftBack = False            
                if (_LfromPx < _LtoPx):
                    _Larray = arange(_LtoPx - _LfromPx) + _LfromPx
                    bLeftBack = True
            
                bRightBack = False
                if (_RfromPx < _RtoPx):
                    _Rarray = arange(_RtoPx - _RfromPx) + _RfromPx
                    bRightBack = True
            
                if (bLeftBack and bRightBack):
                    _y_px_range = numpy.append(_Larray,_Rarray)
                else:
                    if (bLeftBack):
                        _y_px_range = _Larray
                    else:
                        _y_px_range = _Rarray

                for i in _tof_range:
                    _sum = 0.
                    _sum_error = 0.
                    _pts_summed = 0.
                
                    _val = 0.
                    _err = 0.
                    for j in _y_px_range:
                        _val = float(_mt.readY(int(j))[int(i)])
                        _err = float(_mt.readE(int(j))[int(i)])
                        if (_val != 0 and _err !=0):
                            _new_val = float(_val / _err)
                            _new_err = 1./_err
                            _sum += _new_val
                            _sum_error += _new_err

                    if (_val !=0. and _err !=0.):                                        
                        _back_array[i] = float(_sum / _sum_error)
                        _back_array_error[i] = float(1./ _sum_error)
              
                # subtract this number from the rest
                CreateWorkspace(OutputWorkspace='background',
                                DataX=_x_axis,
                                DataY=_back_array,
                                DataE=_back_array_error,
                                UnitX="TOF",
                                ParentWorkspace=mtd[ws_norm_histo_data],
                                NSpec=1)
            
#                #recreate workspace at the end                
#                mt1 = mtd[ws_norm_histo_data+'_1D']
#                mt2 = mtd['background']
                                    
                Minus(LHSWorkspace=ws_norm_histo_data+'_1D',
                      RHSWorkspace='background',
                      OutputWorkspace=ws_norm_rebinned)

                if mtd.workspaceExists(ws_norm_histo_data+'_1D'):
                    mtd.deleteWorkspace(ws_norm_histo_data+'_1D')
                    
                if mtd.workspaceExists('background'):                    
                    mtd.deleteWorkspace('background')

                ResetNegatives(InputWorkspace=ws_norm_rebinned,
                               OutputWorkspace=ws_norm_rebinned,
                               AddMinimum=0)

            else:
            
                # Create a new workspace restricted to the peak range along the
                # y-axis and to the pixels of interest along the x-axis
                # (to avoid the frame effect)
                ws_integrated_data = '_' + ws_name + '_IntegratedNormWks'
                wks_utility.createIntegratedWorkspace(mtd[ws_norm_histo_data], 
                                                      ws_integrated_data,
                                                      fromXpixel=norm_low_res[0],
                                                      toXpixel=norm_low_res[1],
                                                      fromYpixel=norm_peak[0],
                                                      toYpixel=norm_peak[1],
                                                      maxX=maxX,
                                                      maxY=maxY)
            
                RebinToWorkspace(WorkspaceToRebin=ws_integrated_data,
                                 WorkspaceToMatch=ws_data,
                                 OutputWorkspace=ws_norm_rebinned)

                if mtd.workspaceExists(ws_integrated_data):
                    mtd.deleteWorkspace(ws_integrated_data)

            #Normalization    
            print '-> Sum spectra'       
            SumSpectra(InputWorkspace=ws_norm_rebinned, 
                       OutputWorkspace=ws_norm_rebinned)
             
            # divide data by the direct-beam (normalization) workspace
            print '-> Divide data by direct beam'
            Divide(LHSWorkspace=ws_data,
                   RHSWorkspace=ws_norm_rebinned,
                   OutputWorkspace=ws_data)

        #now we can convert to Q
        
        theta = math.fabs(tthd_rad - thi_rad)/2.
        AngleOffset_deg = float(self.getProperty("AngleOffset"))
        AngleOffset_rad = (AngleOffset_deg * math.pi) / 180.
        theta += AngleOffset_rad

        #this is where we need to apply the scaling factor
        sfFile = self.getProperty("ScalingFactorFile")
        incidentMedium = self.getProperty("IncidentMediumSelected")
        if os.path.isfile(sfFile):
            print '-> Apply automatic SF!'        
            print '--> using SF config file: ' + sfFile
            ws_data_scaled = wks_utility.applySF(ws_data,
                                                 incidentMedium,
                                                 sfFile,
                                                 slitsValuePrecision,
                                                 slitsWidthFlag)
            
        else:
            print '-> Automatic SF not applied!'
            print '--> unknown or no SF config file defined !'
            ws_data_scaled = ws_data
            
        if dMD is not None and theta is not None:

            if bDebug:
                print 'DEBUG: theta= {0:4f}'.format(theta) 
                    
            _tof_axis = mtd[ws_data].readX(0)
            _const = float(4) * math.pi * m * dMD / h
            sz_tof = numpy.shape(_tof_axis)[0]
            _q_axis = zeros(sz_tof-1)
            for t in range(sz_tof-1):
                tof1 = _tof_axis[t]
                tof2 = _tof_axis[t+1]
                tofm = (tof1+tof2)/2.
                _Q = _const * math.sin(theta) / (tofm*1e-6)
                _q_axis[t] = _Q*1e-10
            q_max = max(_q_axis)
            if (q_min >= q_max):
                q_min = min(_q_axis)

            if bDebug:
                print 'DEBUG: [q_min:q_bin:q_max]=[{0:4f},{1:4f},{2:4f}]'.format(q_min, q_step, q_max) 

        if (backSubMethod == 1):        
            ws_integrated_data = ws_name + '_IntegratedDataWks'
            print '-> keep only range of pixel of interest' 
            
            wks_utility.createIntegratedWorkspace(mtd[ws_data_scaled], 
                                                  ws_integrated_data,
                                                  fromXpixel=data_low_res[0],
                                                  toXpixel=data_low_res[1],
                                                  fromYpixel=data_peak[0],
                                                  toYpixel=data_peak[1],
                                                  maxX=maxX,
                                                  maxY=maxY)     
            
            ws_data_cleaned = ws_name + '_cleaned'
#            wks_utility.cleanup_data(InputWorkspace=ws_integrated_data,
#                                     OutputWorkspace=ws_data_cleaned,
#                                     maxY=maxY)

            #        mtd.deleteWorkspace(ws_data_scaled)
            #        mtd.deleteWorkspace(ws_data)
            ws_data_Q = ws_data + '_Q'
            print '-> convert to Q'
            #        wks_utility.convertWorkspaceToQ(ws_data_scaled,
            wks_utility.convertWorkspaceToQ(ws_integrated_data,
#            wks_utility.convertWorkspaceToQ(ws_data_cleaned,
                                            ws_data_Q,
                                            fromYpixel=data_peak[0],
                                            toYpixel=data_peak[1],
                                            cpix=data_cpix,
                                            source_to_detector=dMD,
                                            sample_to_detector=dSD,
                                            theta=theta,
                                            geo_correction=False,
                                            q_binning=[q_min,q_step,q_max])

            if mtd.workspaceExists(ws_integrated_data):
                mtd.deleteWorkspace(ws_integrated_data)

        else:
            ws_data_Q = ws_data + '_Q'
            print '-> convert to Q'
            
            wks_utility.convertWorkspaceToQ(ws_data_scaled,
                                            ws_data_Q,
                                            fromYpixel=data_peak[0],
                                            toYpixel=data_peak[1],
                                            cpix=data_cpix,
                                            source_to_detector=dMD,
                                            sample_to_detector=dSD,
                                            theta=theta,
                                            geo_correction=True,
                                            q_binning=[q_min,q_step,q_max])

            if mtd.workspaceExists(ws_data_scaled):
                mtd.deleteWorkspace(ws_data_scaled)


        print '-> replace special values'
        mt = mtd[ws_data_Q]
        ReplaceSpecialValues(InputWorkspace=ws_data_Q, 
                             NaNValue=0, 
                             NaNError=0, 
                             InfinityValue=0, 
                             InfinityError=0, 
                             OutputWorkspace=ws_data_Q)
        
        output_ws = self.getPropertyValue("OutputWorkspace")        
        
        #add a unique time stamp to the data to sort them for the 
        #stitching process
        import time
        _time = int(time.time())
        output_ws = output_ws + '_#' + str(_time) + 'ts'
        
        if mtd.workspaceExists(output_ws):
            mtd.deleteWorkspace(output_ws)
            
        print '-> sum spectra'    
        SumSpectra(InputWorkspace=ws_data_Q, OutputWorkspace=output_ws)

        # keep only non-zero values
        try:
            print '-> keep only non-zero values'
            mt = mtd[output_ws]
            sz = shape(mt.readY(0)[:])[0]
            data_x = []
            data_y = []
            data_y_error = []
            for i in range(sz):
                _y = mt.readY(0)[i]
                #print '_y={0:3f} at i={1:2d}'.format(_y, i)
                if _y != 0.:
                    data_x.append(mt.readX(0)[i])
                    data_y.append(_y)
                    data_y_error.append(mt.readE(0)[i])
        
            # if at least one non-zero value was found
            if data_x != []:
                print '-> cleanup data (remove zeros)'
                CreateWorkspace(OutputWorkspace=output_ws,
                                DataX=data_x,
                                DataY=data_y,
                                DataE=data_y_error,
                                Nspec=1,
                                UnitX="MomentumTransfer")
        except:
            pass

        #removing first and last Q points (edge effect) 
        mt=mtd[output_ws]
        x_axis = mt.readX(0)[:]
        if (len(x_axis) > 2):
            print '-> remove first and last points (edge effect)'
            qmin = x_axis[1]
            qmax = x_axis[-2]
            CropWorkspace(InputWorkspace=output_ws,
                          OutputWorkspace=output_ws,
                          XMin=qmin, XMax=qmax)

        self.setProperty("OutputWorkspace", mtd[output_ws])
        
        #cleanup all workspace used
        print '-> Cleaning useless workspaces'
        if mtd.workspaceExists(ws_event_data):
            mtd.deleteWorkspace(ws_event_data)
        
        if mtd.workspaceExists(ws_data_Q):
            mtd.deleteWorkspace(ws_data_Q)
        
        if mtd.workspaceExists(ws_data):
            mtd.deleteWorkspace(ws_data)
        
        if (NormFlag):
            if mtd.workspaceExists(ws_norm_event_data):
                mtd.deleteWorkspace(ws_norm_event_data)
        
        print
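The TOF-to-Q loop near the end of PyExec can be expressed as a small standalone function; this sketch reproduces the same formula (Q = 4*pi*m*dMD*sin(theta)/(h*TOF), reported in 1/Angstrom) purely for clarity and is not part of the algorithm:

import math
import numpy

def tof_axis_to_q(tof_axis, theta, source_to_detector,
                  h=6.626e-34, m=1.675e-27):
    # bin-centre TOF (microseconds) -> momentum transfer Q (1/Angstrom)
    const = 4.0 * math.pi * m * source_to_detector / h
    tof_centres = 0.5 * (numpy.asarray(tof_axis[:-1]) + numpy.asarray(tof_axis[1:]))
    return const * math.sin(theta) / (tof_centres * 1e-6) * 1e-10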
Example 17
 def tearDown(self):
     for name in ('A','B','C','D','E','F','G','H'):
         mtd.remove(name)
     mtd.remove('mdw')
Example 18
 def tearDown(self):
     mtd.remove('red')
Example 19
 def tearDown(self):
     mtd.remove('mdw')
Example 20
 def tearDown(self):
     mtd.remove('mdw')
Example 21
 def tearDown(self):
     for name in ('A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'):
         mtd.remove(name)
     mtd.remove('mdw')
Example 22
 def tearDown(self):
     if self._ws_name in mtd:
         mtd.remove(self._ws_name)
     if self._ws_name2 in mtd:
         mtd.remove(self._ws_name2)