Example #1
def SensitivityCorrection(flood_data,
                          min_sensitivity=0.5,
                          max_sensitivity=1.5,
                          dark_current=None,
                          use_sample_dc=False):
    flood_data = find_data(flood_data,
                           instrument=ReductionSingleton().get_instrument())
    if dark_current is not None:
        dark_current = find_data(
            dark_current, instrument=ReductionSingleton().get_instrument())

    ReductionSingleton().reduction_properties["SensitivityFile"] = flood_data
    ReductionSingleton(
    ).reduction_properties["MinEfficiency"] = min_sensitivity
    ReductionSingleton(
    ).reduction_properties["MaxEfficiency"] = max_sensitivity
    if dark_current is not None:
        ReductionSingleton(
        ).reduction_properties["SensitivityDarkCurrentFile"] = dark_current
    elif "SensitivityDarkCurrentFile" in ReductionSingleton(
    ).reduction_properties:
        del ReductionSingleton(
        ).reduction_properties["SensitivityDarkCurrentFile"]
    if "SensitivityBeamCenterX" in ReductionSingleton().reduction_properties:
        del ReductionSingleton().reduction_properties["SensitivityBeamCenterX"]
    if "SensitivityBeamCenterY" in ReductionSingleton().reduction_properties:
        del ReductionSingleton().reduction_properties["SensitivityBeamCenterY"]
    ReductionSingleton().reduction_properties["UseDefaultDC"] = use_sample_dc
Example #2
def Background(datafile):
    if isinstance(datafile, list):
        datafile = ','.join(datafile)
    find_data(datafile,
              instrument=ReductionSingleton().get_instrument(),
              allow_multiple=True)
    ReductionSingleton().reduction_properties["BackgroundFiles"] = datafile
Example #3
def BckBeamSpreaderTransmission(sample_spreader,
                                direct_spreader,
                                sample_scattering,
                                direct_scattering,
                                spreader_transmission=1.0,
                                spreader_transmission_err=0.0,
                                theta_dependent=True):
    sample_spreader = find_data(
        sample_spreader, instrument=ReductionSingleton().get_instrument())
    direct_spreader = find_data(
        direct_spreader, instrument=ReductionSingleton().get_instrument())
    sample_scattering = find_data(
        sample_scattering, instrument=ReductionSingleton().get_instrument())
    direct_scattering = find_data(
        direct_scattering, instrument=ReductionSingleton().get_instrument())

    ReductionSingleton(
    ).reduction_properties["BckTransmissionMethod"] = "BeamSpreader"
    ReductionSingleton(
    ).reduction_properties["BckTransSampleSpreaderFilename"] = sample_spreader
    ReductionSingleton(
    ).reduction_properties["BckTransDirectSpreaderFilename"] = direct_spreader
    ReductionSingleton().reduction_properties[
        "BckTransSampleScatteringFilename"] = sample_scattering
    ReductionSingleton().reduction_properties[
        "BckTransDirectScatteringFilename"] = direct_scattering
    ReductionSingleton().reduction_properties[
        "BckSpreaderTransmissionValue"] = spreader_transmission
    ReductionSingleton().reduction_properties[
        "BckSpreaderTransmissionError"] = spreader_transmission_err
    ReductionSingleton(
    ).reduction_properties["BckThetaDependentTransmission"] = theta_dependent
Example #4
def SensitivityCorrection(
        flood_data,
        min_sensitivity=0.5,
        max_sensitivity=1.5,
        dark_current=None,
        use_sample_dc=False):
    flood_data = find_data(
        flood_data,
        instrument=ReductionSingleton().get_instrument())
    if dark_current is not None:
        dark_current = find_data(
            dark_current,
            instrument=ReductionSingleton().get_instrument())

    ReductionSingleton().reduction_properties["SensitivityFile"] = flood_data
    ReductionSingleton().reduction_properties[
        "MinEfficiency"] = min_sensitivity
    ReductionSingleton().reduction_properties[
        "MaxEfficiency"] = max_sensitivity
    if dark_current is not None:
        ReductionSingleton().reduction_properties[
            "SensitivityDarkCurrentFile"] = dark_current
    elif "SensitivityDarkCurrentFile" in ReductionSingleton().reduction_properties:
        del ReductionSingleton().reduction_properties[
            "SensitivityDarkCurrentFile"]
    if "SensitivityBeamCenterX" in ReductionSingleton().reduction_properties:
        del ReductionSingleton().reduction_properties["SensitivityBeamCenterX"]
    if "SensitivityBeamCenterY" in ReductionSingleton().reduction_properties:
        del ReductionSingleton().reduction_properties["SensitivityBeamCenterY"]
    ReductionSingleton().reduction_properties["UseDefaultDC"] = use_sample_dc
Example #5
def Background(datafile):
    if isinstance(datafile, list):
        datafile = ','.join(datafile)
    find_data(
        datafile,
        instrument=ReductionSingleton().get_instrument(),
        allow_multiple=True)
    ReductionSingleton().reduction_properties["BackgroundFiles"] = datafile
Example #6
def TransmissionDirectBeamCenter(datafile):
    datafile = find_data(datafile,
                         instrument=ReductionSingleton().get_instrument())
    ReductionSingleton(
    ).reduction_properties["TransmissionBeamCenterMethod"] = "DirectBeam"
    ReductionSingleton(
    ).reduction_properties["TransmissionBeamCenterFile"] = datafile
Example #7
 def _multiple_load(self, data_file, workspace, property_manager, property_manager_name):
     instrument = ''
     if property_manager.existsProperty('InstrumentName'):
         instrument = property_manager.getProperty('InstrumentName').value
     output_str = ''
     if isinstance(data_file, str):
         if AnalysisDataService.doesExist(data_file):
             data_file = [data_file]
         else:
             data_file = find_data(data_file, instrument=instrument, allow_multiple=True)
     if isinstance(data_file, list):
         for i in range(len(data_file)):
             if i == 0:
                 output_str += self._load_data(data_file[i], workspace, property_manager, property_manager_name)
                 continue
             output_str += self._load_data(data_file[i], '__tmp_wksp', property_manager, property_manager_name)
             api.RebinToWorkspace(WorkspaceToRebin='__tmp_wksp', WorkspaceToMatch=workspace,
                                  OutputWorkspace='__tmp_wksp')
             api.Plus(LHSWorkspace=workspace, RHSWorkspace='__tmp_wksp', OutputWorkspace=workspace)
         if AnalysisDataService.doesExist('__tmp_wksp'):
             AnalysisDataService.remove('__tmp_wksp')
     else:
         output_str += 'Loaded %s\n' % data_file
         output_str += self._load_data(data_file, workspace, property_manager, property_manager_name)
     return output_str
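The control flow above is the core pattern of _multiple_load: the first file is loaded directly into the target workspace, each later file is loaded into a temporary workspace, rebinned to match the target and summed into it, and the temporary workspace is removed afterwards. A stripped-down sketch of that accumulation loop follows; load, rebin_to_match and add_into are hypothetical placeholders standing in for self._load_data, api.RebinToWorkspace and api.Plus.

# Minimal sketch of the merge pattern used by _multiple_load (placeholder callables, not Mantid API).
def accumulate(files, target, load, rebin_to_match, add_into):
    messages = ""
    for i, filename in enumerate(files):
        if i == 0:
            # The first file defines the target workspace.
            messages += load(filename, target)
            continue
        # Later files go into a scratch workspace, get matched to the target binning, then summed in.
        messages += load(filename, "__tmp_wksp")
        rebin_to_match("__tmp_wksp", target)
        add_into(target, "__tmp_wksp")
    return messages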
Example #8
 def _multiple_load(self, data_file, workspace, property_manager, property_manager_name):
     instrument = ''
     if property_manager.existsProperty('InstrumentName'):
         instrument = property_manager.getProperty('InstrumentName').value
     output_str = ''
     if type(data_file) == str:
         if AnalysisDataService.doesExist(data_file):
             data_file = [data_file]
         else:
             data_file = find_data(data_file, instrument=instrument, allow_multiple=True)
     if type(data_file) == list:
         for i in range(len(data_file)):
             if i == 0:
                 output_str += self._load_data(data_file[i], workspace, property_manager, property_manager_name)
                 continue
             output_str += self._load_data(data_file[i], '__tmp_wksp', property_manager, property_manager_name)
             api.Plus(LHSWorkspace=workspace, RHSWorkspace='__tmp_wksp', OutputWorkspace=workspace)
         if AnalysisDataService.doesExist('__tmp_wksp'):
             AnalysisDataService.remove('__tmp_wksp')
     else:
         output_str += 'Loaded %s\n' % data_file
         output_str += self._load_data(data_file, workspace, property_manager, property_manager_name)
     return output_str
Example #9
def DirectBeamCenter(datafile):
    datafile = find_data(
        datafile,
        instrument=ReductionSingleton().get_instrument())
    ReductionSingleton().reduction_properties[
        "BeamCenterMethod"] = "DirectBeam"
    ReductionSingleton().reduction_properties["BeamCenterFile"] = datafile
Example #10
def SensitivityDirectBeamCenter(datafile):
    datafile = find_data(datafile,
                         instrument=ReductionSingleton().get_instrument())
    ReductionSingleton(
    ).reduction_properties["SensitivityBeamCenterMethod"] = "DirectBeam"
    ReductionSingleton(
    ).reduction_properties["SensitivityBeamCenterFile"] = datafile
Example #11
    def _multiple_load(self, data_file, workspace, 
                       property_manager, property_manager_name):
        # Check whether we have a list of files that need merging
        #   Make sure we process a list of files written as a string
        def _load_data(filename, output_ws):
            if not property_manager.existsProperty("LoadAlgorithm"):
                raise RuntimeError, "SANS reduction not set up properly: missing load algorithm"
            p=property_manager.getProperty("LoadAlgorithm")
            alg=Algorithm.fromString(p.valueAsStr)
            alg.setProperty("Filename", filename)
            alg.setProperty("OutputWorkspace", output_ws)
            if alg.existsProperty("ReductionProperties"):
                alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()
            msg = "Loaded %s\n" % filename
            if alg.existsProperty("OutputMessage"):
                msg = alg.getProperty("OutputMessage").value
            return msg
            
        # Get instrument to use with FileFinder
        instrument = ''
        if property_manager.existsProperty("InstrumentName"):
            instrument = property_manager.getProperty("InstrumentName").value

        output_str = ''
        if type(data_file)==str:
            data_file = find_data(data_file, instrument=instrument, allow_multiple=True)
        if type(data_file)==list:
            monitor = 0.0
            timer = 0.0 
            for i in range(len(data_file)):
                output_str += "Loaded %s\n" % data_file[i]
                if i==0:
                    output_str += _load_data(data_file[i], workspace)
                else:
                    output_str += _load_data(data_file[i], '__tmp_wksp')
                    api.Plus(LHSWorkspace=workspace,
                         RHSWorkspace='__tmp_wksp',
                         OutputWorkspace=workspace)
                    # Get the monitor and timer values
                    ws = AnalysisDataService.retrieve('__tmp_wksp')
                    monitor += ws.getRun().getProperty("monitor").value
                    timer += ws.getRun().getProperty("timer").value
            
            # Get the monitor and timer of the first file, which haven't yet
            # been added to the total
            ws = AnalysisDataService.retrieve(workspace)
            monitor += ws.getRun().getProperty("monitor").value
            timer += ws.getRun().getProperty("timer").value
                    
            # Update the timer and monitor
            ws.getRun().addProperty("monitor", monitor, True)
            ws.getRun().addProperty("timer", timer, True)
            
            if AnalysisDataService.doesExist('__tmp_wksp'):
                AnalysisDataService.remove('__tmp_wksp')              
        else:
            output_str += "Loaded %s\n" % data_file
            output_str += _load_data(data_file, workspace)
        return output_str
Example #12
def ScatteringBeamCenter(datafile, beam_radius=3.0):
    datafile = find_data(datafile,
                         instrument=ReductionSingleton().get_instrument())
    ReductionSingleton(
    ).reduction_properties["BeamCenterMethod"] = "Scattering"
    ReductionSingleton().reduction_properties["BeamRadius"] = beam_radius
    ReductionSingleton().reduction_properties["BeamCenterFile"] = datafile
Example #13
def ScatteringBeamCenter(datafile, beam_radius=3.0):
    datafile = find_data(
        datafile,
        instrument=ReductionSingleton().get_instrument())
    ReductionSingleton().reduction_properties[
        "BeamCenterMethod"] = "Scattering"
    ReductionSingleton().reduction_properties["BeamRadius"] = beam_radius
    ReductionSingleton().reduction_properties["BeamCenterFile"] = datafile
Example #14
def BckDirectBeamTransmission(sample_file,
                              empty_file,
                              beam_radius=3.0,
                              theta_dependent=True):
    sample_file = find_data(sample_file,
                            instrument=ReductionSingleton().get_instrument())
    empty_file = find_data(empty_file,
                           instrument=ReductionSingleton().get_instrument())
    ReductionSingleton(
    ).reduction_properties["BckTransmissionMethod"] = "DirectBeam"
    ReductionSingleton(
    ).reduction_properties["BckTransmissionBeamRadius"] = beam_radius
    ReductionSingleton(
    ).reduction_properties["BckTransmissionSampleDataFile"] = sample_file
    ReductionSingleton(
    ).reduction_properties["BckTransmissionEmptyDataFile"] = empty_file
    ReductionSingleton(
    ).reduction_properties["BckThetaDependentTransmission"] = theta_dependent
Example #15
def BckTransmissionDarkCurrent(dark_current=None):
    if dark_current is not None:
        dark_current = find_data(
            dark_current, instrument=ReductionSingleton().get_instrument())
        ReductionSingleton(
        ).reduction_properties["BckTransmissionDarkCurrentFile"] = dark_current
    elif "BckTransmissionDarkCurrentFile" in ReductionSingleton(
    ).reduction_properties:
        del ReductionSingleton(
        ).reduction_properties["BckTransmissionDarkCurrentFile"]
Example #16
def BckTransmissionDarkCurrent(dark_current=None):
    if dark_current is not None:
        dark_current = find_data(
            dark_current,
            instrument=ReductionSingleton().get_instrument())
        ReductionSingleton().reduction_properties[
            "BckTransmissionDarkCurrentFile"] = dark_current
    elif "BckTransmissionDarkCurrentFile" in ReductionSingleton().reduction_properties:
        del ReductionSingleton().reduction_properties[
            "BckTransmissionDarkCurrentFile"]
Example #17
def BckDirectBeamTransmission(
        sample_file,
        empty_file,
        beam_radius=3.0,
        theta_dependent=True):
    sample_file = find_data(
        sample_file,
        instrument=ReductionSingleton().get_instrument())
    empty_file = find_data(
        empty_file,
        instrument=ReductionSingleton().get_instrument())
    ReductionSingleton().reduction_properties[
        "BckTransmissionMethod"] = "DirectBeam"
    ReductionSingleton().reduction_properties[
        "BckTransmissionBeamRadius"] = beam_radius
    ReductionSingleton().reduction_properties[
        "BckTransmissionSampleDataFile"] = sample_file
    ReductionSingleton().reduction_properties[
        "BckTransmissionEmptyDataFile"] = empty_file
    ReductionSingleton().reduction_properties[
        "BckThetaDependentTransmission"] = theta_dependent
Example #18
def BckBeamSpreaderTransmission(
        sample_spreader,
        direct_spreader,
        sample_scattering,
        direct_scattering,
        spreader_transmission=1.0,
        spreader_transmission_err=0.0,
        theta_dependent=True):
    sample_spreader = find_data(
        sample_spreader,
        instrument=ReductionSingleton().get_instrument())
    direct_spreader = find_data(
        direct_spreader,
        instrument=ReductionSingleton().get_instrument())
    sample_scattering = find_data(
        sample_scattering,
        instrument=ReductionSingleton().get_instrument())
    direct_scattering = find_data(
        direct_scattering,
        instrument=ReductionSingleton().get_instrument())

    ReductionSingleton().reduction_properties[
        "BckTransmissionMethod"] = "BeamSpreader"
    ReductionSingleton().reduction_properties[
        "BckTransSampleSpreaderFilename"] = sample_spreader
    ReductionSingleton().reduction_properties[
        "BckTransDirectSpreaderFilename"] = direct_spreader
    ReductionSingleton().reduction_properties[
        "BckTransSampleScatteringFilename"] = sample_scattering
    ReductionSingleton().reduction_properties[
        "BckTransDirectScatteringFilename"] = direct_scattering
    ReductionSingleton().reduction_properties[
        "BckSpreaderTransmissionValue"] = spreader_transmission
    ReductionSingleton().reduction_properties[
        "BckSpreaderTransmissionError"] = spreader_transmission_err
    ReductionSingleton().reduction_properties[
        "BckThetaDependentTransmission"] = theta_dependent
Example #19
 def _multiple_load(self, data_file, workspace, property_manager, property_manager_name):
     instrument = ""
     if property_manager.existsProperty("InstrumentName"):
         property_manager.existsProperty("InstrumentName")
         instrument = property_manager.getProperty("InstrumentName").value
     else:
         property_manager.existsProperty("InstrumentName")
     output_str = ""
     if type(data_file) == str:
         data_file = find_data(data_file, instrument=instrument, allow_multiple=True)
     if type(data_file) == list:
         for i in range(len(data_file)):
             output_str += "Loaded %s\n" % data_file[i]
             if i == 0:
                 output_str += self._load_data(data_file[i], workspace, property_manager, property_manager_name)
                 continue
             output_str += self._load_data(data_file[i], "__tmp_wksp", property_manager, property_manager_name)
             api.Plus(LHSWorkspace=workspace, RHSWorkspace="__tmp_wksp", OutputWorkspace=workspace)
         if AnalysisDataService.doesExist("__tmp_wksp"):
             AnalysisDataService.remove("__tmp_wksp")
     else:
         output_str += "Loaded %s\n" % data_file
         output_str += self._load_data(data_file, workspace, property_manager, property_manager_name)
     return output_str
Example #20
def DarkCurrent(datafile):
    datafile = find_data(datafile,
                         instrument=ReductionSingleton().get_instrument())
    ReductionSingleton().reduction_properties["DarkCurrentFile"] = datafile
Example #21
def DarkCurrent(datafile):
    datafile = find_data(
        datafile,
        instrument=ReductionSingleton().get_instrument())
    ReductionSingleton().reduction_properties["DarkCurrentFile"] = datafile
Example #22
    def _multiple_load(self, data_file, workspace, property_manager,
                       property_manager_name):
        # Check whether we have a list of files that need merging
        #   Make sure we process a list of files written as a string
        def _load_data(filename, output_ws):
            if not property_manager.existsProperty("LoadAlgorithm"):
                raise RuntimeError, "SANS reduction not set up properly: missing load algorithm"
            p = property_manager.getProperty("LoadAlgorithm")
            alg = Algorithm.fromString(p.valueAsStr)
            alg.setProperty("Filename", filename)
            alg.setProperty("OutputWorkspace", output_ws)
            if alg.existsProperty("ReductionProperties"):
                alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()
            msg = "Loaded %s\n" % filename
            if alg.existsProperty("OutputMessage"):
                msg = alg.getProperty("OutputMessage").value
            return msg

        # Get instrument to use with FileFinder
        instrument = ''
        if property_manager.existsProperty("InstrumentName"):
            instrument = property_manager.getProperty("InstrumentName").value

        output_str = ''
        if type(data_file) == str:
            data_file = find_data(data_file,
                                  instrument=instrument,
                                  allow_multiple=True)
        if type(data_file) == list:
            monitor = 0.0
            timer = 0.0
            for i in range(len(data_file)):
                if i == 0:
                    output_str += _load_data(data_file[i], workspace)
                    # Use the first file location as the default output directory
                    head, tail = os.path.split(data_file[0])
                    if os.path.isdir(head):
                        self.default_output_dir = head
                else:
                    output_str += _load_data(data_file[i], '__tmp_wksp')
                    api.Plus(LHSWorkspace=workspace,
                             RHSWorkspace='__tmp_wksp',
                             OutputWorkspace=workspace)
                    # Get the monitor and timer values
                    ws = AnalysisDataService.retrieve('__tmp_wksp')
                    monitor += ws.getRun().getProperty("monitor").value
                    timer += ws.getRun().getProperty("timer").value

            # Get the monitor and timer of the first file, which haven't yet
            # been added to the total
            ws = AnalysisDataService.retrieve(workspace)
            monitor += ws.getRun().getProperty("monitor").value
            timer += ws.getRun().getProperty("timer").value

            # Update the timer and monitor
            ws.getRun().addProperty("monitor", monitor, True)
            ws.getRun().addProperty("timer", timer, True)

            if AnalysisDataService.doesExist('__tmp_wksp'):
                AnalysisDataService.remove('__tmp_wksp')
        else:
            output_str += "Loaded %s\n" % data_file
            output_str += _load_data(data_file, workspace)
            head, tail = os.path.split(data_file)
            if os.path.isdir(head):
                self.default_output_dir = head
        return output_str
Example #23
def BeamMonitorNormalization(reference_flux_file):
    reference_flux_file = find_data(reference_flux_file, instrument=ReductionSingleton().get_instrument())
    ReductionSingleton().reduction_properties["Normalisation"]="Monitor"
    ReductionSingleton().reduction_properties["MonitorReferenceFile"]=reference_flux_file
Example #24
    def PyExec(self):  # noqa: C901
        # Get the reduction property manager
        property_manager_name = self.getProperty("ReductionProperties").value
        property_manager = PropertyManagerDataService.retrieve(property_manager_name)

        # Build the name we are going to give the transmission workspace
        sample_scatt = self.getPropertyValue("SampleScatteringFilename")
        sample_basename = os.path.basename(sample_scatt)
        entry_name = "TransmissionSpreader%s" % sample_scatt
        trans_ws_name = "__transmission_fit_%s" % sample_basename
        trans_ws = None

        # If we have already computed the transmission, use the
        # previously computed workspace
        if property_manager.existsProperty(entry_name):
            trans_ws_name = property_manager.getProperty(entry_name)
            if AnalysisDataService.doesExist(trans_ws_name):
                trans_ws = AnalysisDataService.retrieve(trans_ws_name)

        # Get instrument to use with FileFinder
        instrument = ''
        if property_manager.existsProperty("InstrumentName"):
            instrument = property_manager.getProperty("InstrumentName").value

        # Get the data loader
        def _load_data(filename, output_ws):
            if not property_manager.existsProperty("LoadAlgorithm"):
                Logger("SANSBeamSpreaderTransmission").error("SANS reduction not set up properly: missing load algorithm")
                raise RuntimeError("SANS reduction not set up properly: missing load algorithm")
            p=property_manager.getProperty("LoadAlgorithm")
            alg=Algorithm.fromString(p.valueAsStr)
            alg.setProperty("Filename", filename)
            alg.setProperty("OutputWorkspace", output_ws)
            if alg.existsProperty("ReductionProperties"):
                alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()
            msg = ''
            if alg.existsProperty("OutputMessage"):
                msg = alg.getProperty("OutputMessage").value
            return msg

        # Compute the transmission if we don't already have it
        if trans_ws is None:
            # Load data files
            sample_spreader_ws = "__trans_sample_spreader"
            direct_spreader_ws = "__trans_direct_spreader"
            sample_scatt_ws = "__trans_sample_scatt"
            direct_scatt_ws = "__trans_direct_scatt"

            sample_spread = self.getPropertyValue("SampleSpreaderFilename")
            direct_spread = self.getPropertyValue("DirectSpreaderFilename")
            direct_scatt = self.getPropertyValue("DirectScatteringFilename")

            ws_names = [[sample_spread, sample_spreader_ws],
                        [direct_spread, direct_spreader_ws],
                        [sample_scatt, sample_scatt_ws],
                        [direct_scatt, direct_scatt_ws]]

            for f in ws_names:
                filepath = find_data(f[0], instrument=instrument)
                _load_data(filepath, f[1])
                self._subtract_dark_current(f[1], property_manager)

            # Get normalization for transmission calculation
            monitor_det_ID = None
            if property_manager.existsProperty("TransmissionNormalisation"):
                sample_ws = AnalysisDataService.retrieve(sample_scatt_ws)
                if property_manager.getProperty("TransmissionNormalisation").value=="Monitor":
                    monitor_det_ID = int(sample_ws.getInstrument().getNumberParameter("default-incident-monitor-spectrum")[0])
                else:
                    monitor_det_ID = int(sample_ws.getInstrument().getNumberParameter("default-incident-timer-spectrum")[0])
            elif property_manager.existsProperty("NormaliseAlgorithm"):
                def _normalise(workspace):
                    p=property_manager.getProperty("NormaliseAlgorithm")
                    alg=Algorithm.fromString(p.valueAsStr)
                    alg.setProperty("InputWorkspace", workspace)
                    alg.setProperty("OutputWorkspace", workspace)
                    if alg.existsProperty("ReductionProperties"):
                        alg.setProperty("ReductionProperties", property_manager_name)
                    alg.execute()
                    msg = ''
                    if alg.existsProperty("OutputMessage"):
                        msg += alg.getProperty("OutputMessage").value+'\n'
                    return msg
                for f in ws_names:
                    _normalise(f[1])

            # Calculate transmission. Use the reduction method's normalization channel (time or beam monitor)
            # as the monitor channel.
            spreader_t_value = self.getPropertyValue("SpreaderTransmissionValue")
            spreader_t_error = self.getPropertyValue("SpreaderTransmissionError")

            alg = AlgorithmManager.createUnmanaged('CalculateTransmissionBeamSpreader')
            alg.initialize()
            alg.setProperty("SampleSpreaderRunWorkspace", sample_spreader_ws)
            alg.setProperty("DirectSpreaderRunWorkspace", direct_spreader_ws)
            alg.setProperty("SampleScatterRunWorkspace", sample_scatt_ws)
            alg.setProperty("DirectScatterRunWorkspace", direct_scatt_ws)
            alg.setProperty("IncidentBeamMonitor", monitor_det_ID)
            alg.setProperty("OutputWorkspace",trans_ws_name)
            alg.setProperty("SpreaderTransmissionValue",spreader_t_value)
            alg.setProperty("SpreaderTransmissionError",spreader_t_error)
            alg.execute()

            trans_ws = AnalysisDataService.retrieve(trans_ws_name)

            for f in ws_names:
                if AnalysisDataService.doesExist(f[1]):
                    AnalysisDataService.remove(f[1])

        # 2- Apply correction (Note: Apply2DTransCorr)
        input_ws_name = self.getPropertyValue("InputWorkspace")
        if not AnalysisDataService.doesExist(input_ws_name):
            Logger("SANSBeamSpreaderTransmission").error("Could not find input workspace")
        workspace = AnalysisDataService.retrieve(input_ws_name).name()

        # Clone workspace to make boost-python happy
        api.CloneWorkspace(InputWorkspace=workspace,
                           OutputWorkspace='__'+workspace)
        workspace = '__'+workspace

        self._apply_transmission(workspace, trans_ws_name)

        trans = trans_ws.dataY(0)[0]
        error = trans_ws.dataE(0)[0]

        output_str = ''
        if len(trans_ws.dataY(0))==1:
            self.setProperty("MeasuredTransmission", trans)
            self.setProperty("MeasuredError", error)
            output_str = "\n%s   T = %6.2g += %6.2g\n" % (output_str, trans, error)
        output_msg = "Transmission correction applied [%s]%s\n" % (trans_ws_name, output_str)

        output_ws = AnalysisDataService.retrieve(workspace)
        self.setProperty("OutputWorkspace", output_ws)
        self.setPropertyValue("OutputMessage", output_msg)
Example #25
    def _hfir_scaling(self, property_manager):
        property_manager_name = self.getProperty("ReductionProperties").value
        input_ws = self.getProperty("InputWorkspace").value
        output_ws_name = self.getPropertyValue("OutputWorkspace")
        output_msg = ""

        # Load data file
        data_file = self.getProperty("ReferenceDataFilename").value
        filepath = find_data(data_file, instrument=self.instrument)

        ref_basename = os.path.basename(filepath)
        ref_ws_name = "__abs_scale_%s" % ref_basename

        def _load_data(filename, output_ws):
            if not property_manager.existsProperty("LoadAlgorithm"):
                Logger("SANSDirectBeamTransmission").error("SANS reduction not set up properly: missing load algorithm")
                raise RuntimeError, "SANS reduction not set up properly: missing load algorithm"
            p=property_manager.getProperty("LoadAlgorithm")
            alg=Algorithm.fromString(p.valueAsStr)
            alg.setChild(True)
            alg.setProperty("Filename", filename)
            alg.setProperty("OutputWorkspace", output_ws)
            if alg.existsProperty("ReductionProperties"):
                alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()
            msg = ''
            if alg.existsProperty("OutputMessage"):
                msg = alg.getProperty("OutputMessage").value
            ws = alg.getProperty("OutputWorkspace").value
            return ws, msg

        ref_ws, msg = _load_data(filepath, ref_ws_name)
        output_msg += msg+'\n'

        # Get monitor value:
        # This call is left unprotected because it should fail if that property
        # doesn't exist. It's the responsibility of the parent algorithm to
        # catch that error.
        monitor_prop = property_manager.getProperty("NormaliseAlgorithm")
        alg=Algorithm.fromString(monitor_prop.valueAsStr)
        monitor_id = alg.getPropertyValue("NormalisationType").lower()

        monitor_value = ref_ws.getRun().getProperty(monitor_id.lower()).value
        # HFIR-specific: If we count for monitor we need to multiply by 1e8
        # Need to be consistent with the Normalization step
        if monitor_id == "monitor":
            monitor_value /= 1.0e8

        # Get sample-detector distance
        sdd = ref_ws.getRun().getProperty("sample_detector_distance").value

        # Get the beamstop diameter
        beam_diameter = self.getProperty("BeamstopDiameter").value
        if beam_diameter <= 0:
            if ref_ws.getRun().hasProperty("beam-diameter"):
                beam_diameter = ref_ws.getRun().getProperty("beam-diameter").value
                Logger("SANSAbsoluteScale").debug("Found beamstop diameter: %g" % beam_diameter)
            else:
                raise RuntimeError, "AbsoluteScale could not read the beam radius and none was provided"

        # Apply sensitivity correction
        apply_sensitivity = self.getProperty("ApplySensitivity").value
        if apply_sensitivity and property_manager.existsProperty("SensitivityAlgorithm"):
            p=property_manager.getProperty("SensitivityAlgorithm")
            alg=Algorithm.fromString(p.valueAsStr)
            alg.setChild(True)
            alg.setProperty("InputWorkspace", ref_ws)
            alg.setProperty("OutputWorkspace", ref_ws)
            if alg.existsProperty("ReductionProperties"):
                alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()
            if alg.existsProperty("OutputMessage"):
                output_msg += alg.getProperty("OutputMessage").value+'\n'

        # Get the reference count
        Logger("SANSAbsoluteScale").information("Using beamstop diameter: %g" % beam_diameter)
        det_count = 1
        cylXML = '<infinite-cylinder id="asbsolute_scale">' + \
                   '<centre x="0.0" y="0.0" z="0.0" />' + \
                   '<axis x="0.0" y="0.0" z="1.0" />' + \
                   '<radius val="%12.10f" />' % (beam_diameter/2000.0) + \
                 '</infinite-cylinder>\n'

        alg = AlgorithmManager.create("FindDetectorsInShape")
        alg.initialize()
        alg.setChild(True)
        alg.setProperty("Workspace", ref_ws)
        alg.setPropertyValue("ShapeXML", cylXML)
        alg.execute()
        det_list = alg.getProperty("DetectorList").value
        det_list_str = alg.getPropertyValue("DetectorList")

        det_count_ws_name = "__absolute_scale"
        alg = AlgorithmManager.create("GroupDetectors")
        alg.initialize()
        alg.setChild(True)
        alg.setProperty("InputWorkspace", ref_ws)
        alg.setProperty("OutputWorkspace", det_count_ws_name)
        alg.setPropertyValue("KeepUngroupedSpectra", "0")
        alg.setPropertyValue("DetectorList", det_list_str)
        alg.execute()
        det_count_ws = alg.getProperty("OutputWorkspace").value
        det_count = det_count_ws.readY(0)[0]
        Logger("SANSAbsoluteScale").information("Reference detector counts: %g" % det_count)
        if det_count <= 0:
            Logger("SANSAbsoluteScale").error("Bad reference detector count: check your beam parameters")

        # Pixel size, in mm
        pixel_size_param = ref_ws.getInstrument().getNumberParameter("x-pixel-size")
        if pixel_size_param is not None:
            pixel_size = pixel_size_param[0]
        else:
            raise RuntimeError, "AbsoluteScale could not read the pixel size"

        attenuator_trans = self.getProperty("AttenuatorTransmission").value
        # (detector count rate)/(attenuator transmission)/(monitor rate)*(pixel size/SDD)**2
        scaling_factor = 1.0/(det_count/attenuator_trans/(monitor_value)*(pixel_size/sdd)*(pixel_size/sdd))

        # Apply the scaling factor
        alg = AlgorithmManager.create("Scale")
        alg.initialize()
        alg.setChild(True)
        alg.setProperty("InputWorkspace", input_ws)
        alg.setProperty("OutputWorkspace", output_ws_name)
        alg.setProperty("Factor", scaling_factor)
        alg.setPropertyValue("Operation", "Multiply")
        alg.execute()
        output_ws = alg.getProperty("OutputWorkspace").value
        Logger("SANSAbsoluteScale").notice( "Applied scaling factor %15.15f" % scaling_factor)

        output_msg = output_msg.replace('\n','\n   |')
        output_msg = "Applied scaling factor %g\n%s" % (scaling_factor, output_msg)

        self.setProperty("OutputWorkspace", output_ws)
        self.setProperty("OutputMessage", output_msg)
Example #26
    def _hfir_scaling(self, property_manager):
        property_manager_name = self.getProperty("ReductionProperties").value
        input_ws = self.getProperty("InputWorkspace").value
        output_ws_name = self.getPropertyValue("OutputWorkspace")
        output_msg = ""

        # Load data file
        data_file = self.getProperty("ReferenceDataFilename").value
        filepath = find_data(data_file, instrument=self.instrument)

        ref_basename = os.path.basename(filepath)
        ref_ws_name = "__abs_scale_%s" % ref_basename

        def _load_data(filename, output_ws):
            if not property_manager.existsProperty("LoadAlgorithm"):
                Logger("SANSDirectBeamTransmission").error(
                    "SANS reduction not set up properly: missing load algorithm"
                )
                raise RuntimeError(
                    "SANS reduction not set up properly: missing load algorithm"
                )
            p = property_manager.getProperty("LoadAlgorithm")
            alg = Algorithm.fromString(p.valueAsStr)
            alg.setChild(True)
            alg.setProperty("Filename", filename)
            alg.setProperty("OutputWorkspace", output_ws)
            if alg.existsProperty("ReductionProperties"):
                alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()
            msg = ''
            if alg.existsProperty("OutputMessage"):
                msg = alg.getProperty("OutputMessage").value
            ws = alg.getProperty("OutputWorkspace").value
            return ws, msg

        ref_ws, msg = _load_data(filepath, ref_ws_name)
        output_msg += msg + '\n'

        # Get monitor value:
        # This call is left unprotected because it should fail if that property
        # doesn't exist. It's the responsibility of the parent algorithm to
        # catch that error.
        monitor_prop = property_manager.getProperty("NormaliseAlgorithm")
        alg = Algorithm.fromString(monitor_prop.valueAsStr)
        monitor_id = alg.getPropertyValue("NormalisationType").lower()

        monitor_value = ref_ws.getRun().getProperty(monitor_id.lower()).value
        # HFIR-specific: If we count for monitor we need to multiply by 1e8
        # Need to be consistent with the Normalization step
        if monitor_id == "monitor":
            monitor_value /= 1.0e8

        # Get sample-detector distance
        sdd = ref_ws.getRun().getProperty("sample_detector_distance").value

        # Get the beamstop diameter
        beam_diameter = self.getProperty("BeamstopDiameter").value
        if beam_diameter <= 0:
            if ref_ws.getRun().hasProperty("beam-diameter"):
                beam_diameter = ref_ws.getRun().getProperty(
                    "beam-diameter").value
                Logger("SANSAbsoluteScale").debug(
                    "Found beamstop diameter: %g" % beam_diameter)
            else:
                raise RuntimeError(
                    "AbsoluteScale could not read the beam radius and none was provided"
                )

        # Apply sensitivity correction
        apply_sensitivity = self.getProperty("ApplySensitivity").value
        if apply_sensitivity and property_manager.existsProperty(
                "SensitivityAlgorithm"):
            p = property_manager.getProperty("SensitivityAlgorithm")
            alg = Algorithm.fromString(p.valueAsStr)
            alg.setChild(True)
            alg.setProperty("InputWorkspace", ref_ws)
            alg.setProperty("OutputWorkspace", ref_ws)
            if alg.existsProperty("ReductionProperties"):
                alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()
            if alg.existsProperty("OutputMessage"):
                output_msg += alg.getProperty("OutputMessage").value + '\n'

        # Get the reference count
        Logger("SANSAbsoluteScale").information("Using beamstop diameter: %g" %
                                                beam_diameter)
        det_count = 1
        cylXML = '<infinite-cylinder id="asbsolute_scale">' + \
            '<centre x="0.0" y="0.0" z="0.0" />' + \
            '<axis x="0.0" y="0.0" z="1.0" />' + \
            '<radius val="%12.10f" />' % (beam_diameter/2000.0) + \
                 '</infinite-cylinder>\n'

        alg = AlgorithmManager.create("FindDetectorsInShape")
        alg.initialize()
        alg.setChild(True)
        alg.setProperty("Workspace", ref_ws)
        alg.setPropertyValue("ShapeXML", cylXML)
        alg.execute()
        #det_list = alg.getProperty("DetectorList").value
        det_list_str = alg.getPropertyValue("DetectorList")

        det_count_ws_name = "__absolute_scale"
        alg = AlgorithmManager.create("GroupDetectors")
        alg.initialize()
        alg.setChild(True)
        alg.setProperty("InputWorkspace", ref_ws)
        alg.setProperty("OutputWorkspace", det_count_ws_name)
        alg.setPropertyValue("KeepUngroupedSpectra", "0")
        alg.setPropertyValue("DetectorList", det_list_str)
        alg.execute()
        det_count_ws = alg.getProperty("OutputWorkspace").value
        det_count = det_count_ws.readY(0)[0]
        Logger("SANSAbsoluteScale").information(
            "Reference detector counts: %g" % det_count)
        if det_count <= 0:
            Logger("SANSAbsoluteScale").error(
                "Bad reference detector count: check your beam parameters")

        # Pixel size, in mm
        pixel_size_param = ref_ws.getInstrument().getNumberParameter(
            "x-pixel-size")
        if pixel_size_param is not None:
            pixel_size = pixel_size_param[0]
        else:
            raise RuntimeError("AbsoluteScale could not read the pixel size")

        attenuator_trans = self.getProperty("AttenuatorTransmission").value
        # (detector count rate)/(attenuator transmission)/(monitor rate)*(pixel size/SDD)**2
        scaling_factor = 1.0 / (det_count / attenuator_trans /
                                (monitor_value) * (pixel_size / sdd) *
                                (pixel_size / sdd))

        # Apply the scaling factor
        alg = AlgorithmManager.create("Scale")
        alg.initialize()
        alg.setChild(True)
        alg.setProperty("InputWorkspace", input_ws)
        alg.setProperty("OutputWorkspace", output_ws_name)
        alg.setProperty("Factor", scaling_factor)
        alg.setPropertyValue("Operation", "Multiply")
        alg.execute()
        output_ws = alg.getProperty("OutputWorkspace").value
        Logger("SANSAbsoluteScale").notice("Applied scaling factor %15.15f" %
                                           scaling_factor)

        output_msg = output_msg.replace('\n', '\n   |')
        output_msg = "Applied scaling factor %g\n%s" % (scaling_factor,
                                                        output_msg)

        self.setProperty("OutputWorkspace", output_ws)
        self.setProperty("OutputMessage", output_msg)
Example #27
    def PyExec(self):
        # Get the reduction property manager
        property_manager_name = self.getProperty("ReductionProperties").value
        property_manager = PropertyManagerDataService.retrieve(property_manager_name)
        
        # Build the name we are going to give the transmission workspace
        sample_scatt = self.getPropertyValue("SampleScatteringFilename")
        sample_basename = os.path.basename(sample_scatt)
        entry_name = "TransmissionSpreader%s" % sample_scatt
        trans_ws_name = "__transmission_fit_%s" % sample_basename
        trans_ws = None
        
        # If we have already computed the transmission, use the
        # previously computed workspace
        if property_manager.existsProperty(entry_name):
            trans_ws_name = property_manager.getProperty(entry_name)
            if AnalysisDataService.doesExist(trans_ws_name):
                trans_ws = AnalysisDataService.retrieve(trans_ws_name)
        
        # Get instrument to use with FileFinder
        instrument = ''
        if property_manager.existsProperty("InstrumentName"):
            instrument = property_manager.getProperty("InstrumentName").value

        # Get the data loader
        def _load_data(filename, output_ws):
            if not property_manager.existsProperty("LoadAlgorithm"):
                Logger("SANSBeamSpreaderTransmission").error("SANS reduction not set up properly: missing load algorithm")
                raise RuntimeError, "SANS reduction not set up properly: missing load algorithm"
            p=property_manager.getProperty("LoadAlgorithm")
            alg=Algorithm.fromString(p.valueAsStr)
            alg.setProperty("Filename", filename)
            alg.setProperty("OutputWorkspace", output_ws)
            if alg.existsProperty("ReductionProperties"):
                alg.setProperty("ReductionProperties", property_manager_name)
            alg.execute()
            msg = ''
            if alg.existsProperty("OutputMessage"):
                msg = alg.getProperty("OutputMessage").value
            return msg

        # Compute the transmission if we don't already have it
        if trans_ws is None:
            # Load data files
            sample_spreader_ws = "__trans_sample_spreader"
            direct_spreader_ws = "__trans_direct_spreader"
            sample_scatt_ws = "__trans_sample_scatt"
            direct_scatt_ws = "__trans_direct_scatt"
            
            sample_spread = self.getPropertyValue("SampleSpreaderFilename")
            direct_spread = self.getPropertyValue("DirectSpreaderFilename")
            direct_scatt = self.getPropertyValue("DirectScatteringFilename")
            
            ws_names = [[sample_spread, sample_spreader_ws],
                        [direct_spread, direct_spreader_ws],
                        [sample_scatt, sample_scatt_ws],
                        [direct_scatt, direct_scatt_ws]]
            dark_current_data = self.getPropertyValue("DarkCurrentFilename")

            for f in ws_names:
                filepath = find_data(f[0], instrument=instrument)
                _load_data(filepath, f[1])
                self._subtract_dark_current(f[1], property_manager)
            
            # Get normalization for transmission calculation
            monitor_det_ID = None
            if property_manager.existsProperty("TransmissionNormalisation"):
                sample_ws = AnalysisDataService.retrieve(sample_scatt_ws)
                if property_manager.getProperty("TransmissionNormalisation").value=="Monitor":
                    monitor_det_ID = int(sample_ws.getInstrument().getNumberParameter("default-incident-monitor-spectrum")[0])
                else:
                    monitor_det_ID = int(sample_ws.getInstrument().getNumberParameter("default-incident-timer-spectrum")[0])
            elif property_manager.existsProperty("NormaliseAlgorithm"):
                def _normalise(workspace):
                    p=property_manager.getProperty("NormaliseAlgorithm")
                    alg=Algorithm.fromString(p.valueAsStr)
                    alg.setProperty("InputWorkspace", workspace)
                    alg.setProperty("OutputWorkspace", workspace)
                    if alg.existsProperty("ReductionProperties"):
                        alg.setProperty("ReductionProperties", property_manager_name)
                    alg.execute()
                    msg = ''
                    if alg.existsProperty("OutputMessage"):
                        msg += alg.getProperty("OutputMessage").value+'\n'
                    return msg
                for f in ws_names:
                    _normalise(f[1])
            
            # Calculate transmission. Use the reduction method's normalization channel (time or beam monitor)
            # as the monitor channel.
            spreader_t_value = self.getPropertyValue("SpreaderTransmissionValue")
            spreader_t_error = self.getPropertyValue("SpreaderTransmissionError")
 
            alg = AlgorithmManager.createUnmanaged('CalculateTransmissionBeamSpreader')
            alg.initialize()
            alg.setProperty("SampleSpreaderRunWorkspace", sample_spreader_ws)
            alg.setProperty("DirectSpreaderRunWorkspace", direct_spreader_ws)
            alg.setProperty("SampleScatterRunWorkspace", sample_scatt_ws)
            alg.setProperty("DirectScatterRunWorkspace", direct_scatt_ws)
            alg.setProperty("IncidentBeamMonitor", monitor_det_ID)
            alg.setProperty("OutputWorkspace",trans_ws_name)
            alg.setProperty("SpreaderTransmissionValue",spreader_t_value)
            alg.setProperty("SpreaderTransmissionError",spreader_t_error)
            alg.execute()
            
            trans_ws = AnalysisDataService.retrieve(trans_ws_name)
            
            for f in ws_names:
                if AnalysisDataService.doesExist(f[1]):
                    AnalysisDataService.remove(f[1])                 
            
        # 2- Apply correction (Note: Apply2DTransCorr)      
        input_ws_name = self.getPropertyValue("InputWorkspace")
        if not AnalysisDataService.doesExist(input_ws_name):
            Logger("SANSBeamSpreaderTransmission").error("Could not find input workspace")
        workspace = AnalysisDataService.retrieve(input_ws_name).getName()
        
        # Clone workspace to make boost-python happy
        api.CloneWorkspace(InputWorkspace=workspace,
                           OutputWorkspace='__'+workspace)
        workspace = '__'+workspace

        self._apply_transmission(workspace, trans_ws_name)

        trans = trans_ws.dataY(0)[0]
        error = trans_ws.dataE(0)[0]
        
        output_str = ''
        if len(trans_ws.dataY(0))==1:
            self.setProperty("MeasuredTransmission", trans)
            self.setProperty("MeasuredError", error)
            output_str = "\n%s   T = %6.2g += %6.2g\n" % (output_str, trans, error)
        output_msg = "Transmission correction applied [%s]%s\n" % (trans_ws_name, output_str)
        
        output_ws = AnalysisDataService.retrieve(workspace)
        self.setProperty("OutputWorkspace", output_ws)
        self.setPropertyValue("OutputMessage", output_msg)
Example #28
def BeamMonitorNormalization(reference_flux_file):
    reference_flux_file = find_data(
        reference_flux_file, instrument=ReductionSingleton().get_instrument())
    ReductionSingleton().reduction_properties["Normalisation"] = "Monitor"
    ReductionSingleton(
    ).reduction_properties["MonitorReferenceFile"] = reference_flux_file