Example #1
 def performOperation(self):
     lhs_valid, rhs_valid, err_msg = self.validateInputs()
     if err_msg != str():
         return lhs_valid, rhs_valid, err_msg
     lhs_ws, rhs_ws = self._scale_input_workspaces()
     try:
         if self._operation == '+':
             if self._md_lhs or self._md_rhs:
                 PlusMD(LHSWorkspace=lhs_ws,
                        RHSWorkspace=rhs_ws,
                        OutputWorkspace=self._output_ws)
             else:
                 Plus(LHSWorkspace=lhs_ws,
                      RHSWorkspace=rhs_ws,
                      OutputWorkspace=self._output_ws)
         elif self._operation == '-':
             if self._md_lhs or self._md_rhs:
                 MinusMD(LHSWorkspace=lhs_ws,
                         RHSWorkspace=rhs_ws,
                         OutputWorkspace=self._output_ws)
             else:
                 Minus(LHSWorkspace=lhs_ws,
                       RHSWorkspace=rhs_ws,
                       OutputWorkspace=self._output_ws)
         elif self._operation == '*':
             if self._md_lhs or self._md_rhs:
                 MultiplyMD(LHSWorkspace=lhs_ws,
                            RHSWorkspace=rhs_ws,
                            OutputWorkspace=self._output_ws)
             else:
                 Multiply(LHSWorkspace=lhs_ws,
                          RHSWorkspace=rhs_ws,
                          OutputWorkspace=self._output_ws)
         elif self._operation == 'WM':
             if self._md_lhs or self._md_rhs:
                 WeightedMeanMD(LHSWorkspace=lhs_ws,
                                RHSWorkspace=rhs_ws,
                                OutputWorkspace=self._output_ws)
             else:
                 WeightedMean(InputWorkspace1=lhs_ws,
                              InputWorkspace2=rhs_ws,
                              OutputWorkspace=self._output_ws)
         else:
             if self._md_lhs or self._md_rhs:
                 DivideMD(LHSWorkspace=lhs_ws,
                          RHSWorkspace=rhs_ws,
                          OutputWorkspace=self._output_ws)
             else:
                 Divide(LHSWorkspace=lhs_ws,
                        RHSWorkspace=rhs_ws,
                        OutputWorkspace=self._output_ws)
     except (RuntimeError, ValueError) as err:
         return False, False, str(err)
     else:
         self._regularize_output_names(self._output_ws)
     finally:
         DeleteWorkspaces(WorkspaceList=[lhs_ws, rhs_ws])
     return True, True, ""
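
The chained if/elif above can also be written as a dispatch table. A minimal sketch, assuming the same mantid.simpleapi binary algorithms are importable; the helper name _run_operation is illustrative only. WeightedMean keeps its own branch because it names its inputs InputWorkspace1/InputWorkspace2 rather than LHSWorkspace/RHSWorkspace, and unknown operations fall back to division exactly as in the original else branch.

from mantid.simpleapi import (Plus, PlusMD, Minus, MinusMD, Multiply, MultiplyMD,
                              Divide, DivideMD, WeightedMean, WeightedMeanMD)

# Sketch only: operation -> (regular algorithm, MD algorithm)
_BINARY_OPS = {
    '+': (Plus, PlusMD),
    '-': (Minus, MinusMD),
    '*': (Multiply, MultiplyMD),
    'WM': (WeightedMean, WeightedMeanMD),
    '/': (Divide, DivideMD),
}

def _run_operation(self, lhs_ws, rhs_ws):
    # Anything not listed falls back to division, as in the original else branch
    regular_alg, md_alg = _BINARY_OPS.get(self._operation, (Divide, DivideMD))
    if self._md_lhs or self._md_rhs:
        md_alg(LHSWorkspace=lhs_ws, RHSWorkspace=rhs_ws,
               OutputWorkspace=self._output_ws)
    elif regular_alg is WeightedMean:
        # WeightedMean names its inputs differently from the other binary algorithms
        WeightedMean(InputWorkspace1=lhs_ws, InputWorkspace2=rhs_ws,
                     OutputWorkspace=self._output_ws)
    else:
        regular_alg(LHSWorkspace=lhs_ws, RHSWorkspace=rhs_ws,
                    OutputWorkspace=self._output_ws)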
Example #2
 def _two_factor_corrections_approximation(self, sample_workspace,
                                           container_workspace,
                                           factor_workspaces):
     acc = factor_workspaces['acc']
     ass = factor_workspaces['ass']
     minuend = Divide(sample_workspace, ass, StoreInADS=False)
     subtrahend = Divide(container_workspace, acc, StoreInADS=False)
     difference = Minus(minuend, subtrahend, OutputWorkspace="__difference")
     return difference
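
Reading the arithmetic off the code (ass and acc presumably being the Paalman–Pings attenuation factors $A_{s,s}$ and $A_{c,c}$), this two-factor approximation computes

    difference = sample_workspace / ass - container_workspace / acc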
Example #3
 def estimate_background(self, ws_name, niter, xwindow, doSGfilter):
     try:
         ws_bg = EnggEstimateFocussedBackground(InputWorkspace=ws_name, OutputWorkspace=ws_name + "_bg",
                                                NIterations=niter, XWindow=xwindow, ApplyFilterSG=doSGfilter)
     except (ValueError, RuntimeError) as e:
         # ValueError when Niter not positive integer, RuntimeError when Window too small
         logger.error("Error on arguments supplied to EnggEstimateFocussedBackground: " + str(e))
         ws_bg = SetUncertainties(InputWorkspace=ws_name)  # copy data and zero errors
         ws_bg = Minus(LHSWorkspace=ws_bg, RHSWorkspace=ws_bg)  # workspace of zeros with same num spectra
     return ws_bg
Example #4
 def correctSampleData(self, sampleWsName, useVana, vanaWsName, useEmpty,
                       emptyWsName):
     if useEmpty:
         Minus(LHSWorkspace=sampleWsName,
               RHSWorkspace=emptyWsName,
               OutputWorkspace=sampleWsName)
     if useVana:
         Divide(LHSWorkspace=sampleWsName,
                RHSWorkspace=vanaWsName,
                OutputWorkspace=sampleWsName)
Example #5
 def plot_background_figure(self, ws_name):
     ws = self._loaded_workspaces[ws_name]
     ws_bgsub = self._bg_sub_workspaces[ws_name]
     if ws_bgsub:
         fig, ax = subplots(2, 1, sharex=True, gridspec_kw={'height_ratios': [2, 1]},
                            subplot_kw={'projection': 'mantid'})
         bg = Minus(LHSWorkspace=ws_name, RHSWorkspace=ws_bgsub, StoreInADS=False)
         ax[0].plot(ws, 'x')
         ax[1].plot(ws_bgsub, 'x')
         ax[0].plot(bg, '-r')
         fig.show()
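
Assuming ws_bgsub was produced as data minus an estimated background (as in Example #7 below), the Minus call above recovers the background itself: bg = ws - ws_bgsub = ws - (ws - background) = background, so ax[0] overlays the raw data with the estimated background curve.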
Example #6
 def _three_factor_corrections_approximation(self, sample_workspace,
                                             container_workspace,
                                             factor_workspaces):
     acc = factor_workspaces['acc']
     acsc = factor_workspaces['acsc']
     assc = factor_workspaces['assc']
     subtrahend = Multiply(container_workspace, (acsc / acc),
                           StoreInADS=False)
     difference = Minus(sample_workspace, subtrahend, StoreInADS=False)
     quotient = Divide(difference, assc, OutputWorkspace="__quotient")
     return quotient
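
Again reading off the code (acc, acsc and assc presumably being the Paalman–Pings factors $A_{c,c}$, $A_{c,sc}$ and $A_{s,sc}$), this three-factor approximation computes

    quotient = (sample_workspace - container_workspace * acsc / acc) / assc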
Example #7
 def create_or_update_bgsub_ws(self, ws_name, bg_params):
     ws = self._loaded_workspaces[ws_name]
     ws_bg = self._bg_sub_workspaces[ws_name]
     if not ws_bg or self._bg_params[ws_name] == [] or bg_params[1:] != self._bg_params[ws_name][1:]:
         background = self.estimate_background(ws_name, *bg_params[1:])
         self._bg_params[ws_name] = bg_params
         bgsub_ws_name = ws_name + "_bgsub"
         bgsub_ws = Minus(LHSWorkspace=ws, RHSWorkspace=background, OutputWorkspace=bgsub_ws_name)
         self._bg_sub_workspaces[ws_name] = bgsub_ws
         DeleteWorkspace(background)
     else:
         logger.notice("Background workspace already calculated")
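
For reference, the shape of bg_params can be inferred from how it is unpacked above and in Example #9: element 0 is the subtraction flag, and elements 1: are forwarded to estimate_background as niter, xwindow and doSGfilter. A purely illustrative value:

bg_params = [True, 50, 600, False]  # [do_subtract, niter, xwindow, doSGfilter] -- example numbers only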
Example #8
def _subtractEC(ws, ecWS, ecScaling, wsNames, wsCleanup, algorithmLogging):
    """Subtract empty container."""
    # out = in - ecScaling * EC
    scaledECWSName = wsNames.withSuffix('scaled_EC')
    scaledECWS = Scale(InputWorkspace=ecWS,
                       Factor=ecScaling,
                       OutputWorkspace=scaledECWSName,
                       EnableLogging=algorithmLogging)
    ecSubtractedWSName = wsNames.withSuffix('EC_subtracted')
    ecSubtractedWS = Minus(LHSWorkspace=ws,
                           RHSWorkspace=scaledECWS,
                           OutputWorkspace=ecSubtractedWSName,
                           EnableLogging=algorithmLogging)
    wsCleanup.cleanup(scaledECWS)
    return ecSubtractedWS
Example #9
 def do_background_subtraction(self, ws_name, bg_params):
     ws = self._loaded_workspaces[ws_name]
     ws_bg = self._background_workspaces[ws_name]
     bg_changed = False
     if ws_bg and bg_params[1:] != self._bg_params[ws_name][1:]:
         # add bg back on to data (but don't change bgsub status)
         self.undo_background_subtraction(ws_name, isBGsub=bg_params[0])
         bg_changed = True
     if bg_changed or not ws_bg:
         # re-evaluate background (or evaluate for first time)
         self._bg_params[ws_name] = bg_params
         ws_bg = self.estimate_background(ws_name, *bg_params[1:])
     # update bg sub status before Minus (updates plot which repopulates table)
     self._bg_params[ws_name][0] = bg_params[0]
     Minus(LHSWorkspace=ws, RHSWorkspace=ws_bg, OutputWorkspace=ws_name)
Example #10
 def _subtractFlatBkg(self, ws):
     """Return a workspace where a flat background has been subtracted from ws."""
     method = self.getProperty(Prop.BKG_METHOD).value
     if method == BkgMethod.OFF:
         return ws
     clonedWSName = self._names.withSuffix('cloned_for_flat_bkg')
     clonedWS = CloneWorkspace(
         InputWorkspace=ws,
         OutputWorkspace=clonedWSName,
         EnableLogging=self._subalgLogging
     )
     transposedWSName = self._names.withSuffix('transposed_clone')
     transposedWS = Transpose(
         InputWorkspace=clonedWS,
         OutputWorkspace=transposedWSName,
         EnableLogging=self._subalgLogging
     )
     self._cleanup.cleanup(clonedWS)
     ranges = self._flatBkgRanges(ws)
     polynomialDegree = 0 if self.getProperty(Prop.BKG_METHOD).value == BkgMethod.CONSTANT else 1
     transposedBkgWSName = self._names.withSuffix('transposed_flat_background')
     transposedBkgWS = CalculatePolynomialBackground(
         InputWorkspace=transposedWS,
         OutputWorkspace=transposedBkgWSName,
         Degree=polynomialDegree,
         XRanges=ranges,
         CostFunction='Unweighted least squares',
         EnableLogging=self._subalgLogging
     )
     self._cleanup.cleanup(transposedWS)
     bkgWSName = self._names.withSuffix('flat_background')
     bkgWS = Transpose(
         InputWorkspace=transposedBkgWS,
         OutputWorkspace=bkgWSName,
         EnableLogging=self._subalgLogging
     )
     self._cleanup.cleanup(transposedBkgWS)
     subtractedWSName = self._names.withSuffix('flat_background_subtracted')
     subtractedWS = Minus(
         LHSWorkspace=ws,
         RHSWorkspace=bkgWS,
         OutputWorkspace=subtractedWSName,
         EnableLogging=self._subalgLogging
     )
     self._cleanup.cleanup(ws)
     self._cleanup.cleanup(bkgWS)
     return subtractedWS
Example #11
    def plot_background_figure(self, ws_name):
        def on_draw(event):
            if event.canvas.signalsBlocked():
                # This stops an infinite loop, as draw() is called within this handler (which sets signalsBlocked to True)
                # Resets signalsBlocked to False (the default value)
                event.canvas.blockSignals(False)
            else:
                axes = event.canvas.figure.get_axes()
                data_line = next(
                    (line for line in axes[0].get_tracked_artists()), None)
                bg_line = next((line for line in axes[0].get_lines()
                                if line not in axes[0].get_tracked_artists()),
                               None)
                bgsub_line = next(
                    (line for line in axes[1].get_tracked_artists()), None)
                if data_line and bg_line and bgsub_line:
                    event.canvas.blockSignals(
                        True
                    )  # this doesn't stop this handler being called again on canvas.draw()
                    bg_line.set_ydata(data_line.get_ydata() -
                                      bgsub_line.get_ydata())
                    event.canvas.draw()
                else:
                    # would like to close the fig at this point but this interferes with the mantid ADS observers when
                    # any of the tracked workspaces are deleted and causes mantid to hard crash - so just print warning
                    logger.warning(
                        f"Inspect background figure {event.canvas.figure.number} has been invalidated - the "
                        f"background curve will no longer be updated.")

        ws = self._data_workspaces[ws_name].loaded_ws
        ws_bgsub = self._data_workspaces[ws_name].bgsub_ws
        if ws_bgsub:
            fig, ax = subplots(2,
                               1,
                               sharex=True,
                               gridspec_kw={'height_ratios': [2, 1]},
                               subplot_kw={'projection': 'mantid'})
            bg = Minus(LHSWorkspace=ws_name,
                       RHSWorkspace=ws_bgsub,
                       StoreInADS=False)
            ax[0].plot(ws, 'x')
            ax[1].plot(ws_bgsub, 'x', label='background subtracted data')
            ax[0].plot(bg, '-r', label='background')
            ax[0].legend(fontsize=8.0)
            ax[1].legend(fontsize=8.0)
            fig.canvas.mpl_connect("draw_event", on_draw)
            fig.show()
Example #12
def _subtractFlatBkg(ws, wsType, bkgWorkspace, bkgScaling, wsNames, wsCleanup, algorithmLogging):
    """Subtract a scaled flat background from a workspace."""
    if wsType == common.WS_CONTENT_DETS:
        subtractedWSName = wsNames.withSuffix('flat_bkg_subtracted_detectors')
        scaledBkgWSName = wsNames.withSuffix('flat_bkg_for_detectors_scaled')
    else:
        subtractedWSName = wsNames.withSuffix('flat_bkg_subtracted_monitors')
        scaledBkgWSName = wsNames.withSuffix('flat_bkg_for_monitors_scaled')
    Scale(InputWorkspace=bkgWorkspace,
          OutputWorkspace=scaledBkgWSName,
          Factor=bkgScaling,
          EnableLogging=algorithmLogging)
    subtractedWS = Minus(LHSWorkspace=ws,
                         RHSWorkspace=scaledBkgWSName,
                         OutputWorkspace=subtractedWSName,
                         EnableLogging=algorithmLogging)
    wsCleanup.cleanup(scaledBkgWSName)
    return subtractedWS
Example #13
def _subtractEC(ws, ecWS, ecScaling, wsNames, wsCleanup, algorithmLogging):
    """Subtract empty container."""
    # out = in - ecScaling * EC
    scalingWSName = wsNames.withSuffix('ecScaling')
    scalingWS = CreateSingleValuedWorkspace(OutputWorkspace=scalingWSName,
                                            DataValue=ecScaling,
                                            EnableLogging=algorithmLogging)
    scaledECWSName = wsNames.withSuffix('scaled_EC')
    scaledECWS = Multiply(LHSWorkspace=ecWS,
                          RHSWorkspace=scalingWS,
                          OutputWorkspace=scaledECWSName,
                          EnableLogging=algorithmLogging)
    ecSubtractedWSName = wsNames.withSuffix('EC_subtracted')
    ecSubtractedWS = Minus(LHSWorkspace=ws,
                           RHSWorkspace=scaledECWS,
                           OutputWorkspace=ecSubtractedWSName,
                           EnableLogging=algorithmLogging)
    wsCleanup.cleanup(scalingWS)
    wsCleanup.cleanup(scaledECWS)
    return ecSubtractedWS
Example #14
def TotalScatteringReduction(config=None):
    facility = config['Facility']
    title = config['Title']
    instr = config['Instrument']

    # Get an instance to Mantid's logger
    log = Logger("TotalScatteringReduction")

    # Get sample info
    sample = get_sample(config)
    sam_mass_density = sample.get('MassDensity', None)
    sam_packing_fraction = sample.get('PackingFraction', None)
    sam_geometry = sample.get('Geometry', None)
    sam_material = sample.get('Material', None)

    sam_geo_dict = {
        'Shape': 'Cylinder',
        'Radius': config['Sample']['Geometry']['Radius'],
        'Height': config['Sample']['Geometry']['Height']
    }
    sam_mat_dict = {
        'ChemicalFormula': sam_material,
        'SampleMassDensity': sam_mass_density
    }
    if 'Environment' in config:
        sam_env_dict = {
            'Name': config['Environment']['Name'],
            'Container': config['Environment']['Container']
        }
    else:
        sam_env_dict = {'Name': 'InAir', 'Container': 'PAC06'}
    # Get normalization info
    van = get_normalization(config)
    van_mass_density = van.get('MassDensity', None)
    van_packing_fraction = van.get('PackingFraction', 1.0)
    van_geometry = van.get('Geometry', None)
    van_material = van.get('Material', 'V')

    van_geo_dict = {
        'Shape': 'Cylinder',
        'Radius': config['Normalization']['Geometry']['Radius'],
        'Height': config['Normalization']['Geometry']['Height']
    }
    van_mat_dict = {
        'ChemicalFormula': van_material,
        'SampleMassDensity': van_mass_density
    }

    # Get calibration, characterization, and other settings
    merging = config['Merging']
    binning = merging['QBinning']
    characterizations = merging.get('Characterizations', None)

    # Grouping
    grouping = merging.get('Grouping', None)
    cache_dir = config.get("CacheDir", os.path.abspath('.'))
    OutputDir = config.get("OutputDir", os.path.abspath('.'))

    # Create Nexus file basenames
    sample['Runs'] = expand_ints(sample['Runs'])
    sample['Background']['Runs'] = expand_ints(sample['Background'].get(
        'Runs', None))
    '''
    Currently not implemented:
    # wkspIndices = merging.get('SumBanks', None)
    # high_q_linear_fit_range = config['HighQLinearFitRange']

    POWGEN options not used
    #alignAndFocusArgs['RemovePromptPulseWidth'] = 50
    # alignAndFocusArgs['CompressTolerance'] use defaults
    # alignAndFocusArgs['UnwrapRef'] POWGEN option
    # alignAndFocusArgs['LowResRef'] POWGEN option
    # alignAndFocusArgs['LowResSpectrumOffset'] POWGEN option

    How much of each bank gets merged has info here in the form of
    # {"ID", "Qmin", "QMax"}
    # alignAndFocusArgs['CropWavelengthMin'] from characterizations file
    # alignAndFocusArgs['CropWavelengthMax'] from characterizations file
    '''

    if facility == 'SNS':
        facility_file_format = '%s_%d'
    else:
        facility_file_format = '%s%d'

    sam_scans = ','.join(
        [facility_file_format % (instr, num) for num in sample['Runs']])
    container_scans = ','.join([
        facility_file_format % (instr, num)
        for num in sample['Background']["Runs"]
    ])
    container_bg = None
    if "Background" in sample['Background']:
        sample['Background']['Background']['Runs'] = expand_ints(
            sample['Background']['Background']['Runs'])
        container_bg = ','.join([
            facility_file_format % (instr, num)
            for num in sample['Background']['Background']['Runs']
        ])
        if len(container_bg) == 0:
            container_bg = None

    van['Runs'] = expand_ints(van['Runs'])
    van_scans = ','.join(
        [facility_file_format % (instr, num) for num in van['Runs']])

    van_bg_scans = None
    if 'Background' in van:
        van_bg_scans = van['Background']['Runs']
        van_bg_scans = expand_ints(van_bg_scans)
        van_bg_scans = ','.join(
            [facility_file_format % (instr, num) for num in van_bg_scans])

    # Override Nexus file basename with Filenames if present
    if "Filenames" in sample:
        sam_scans = ','.join(sample["Filenames"])
    if "Filenames" in sample['Background']:
        container_scans = ','.join(sample['Background']["Filenames"])
    if "Background" in sample['Background']:
        if "Filenames" in sample['Background']['Background']:
            container_bg = ','.join(
                sample['Background']['Background']['Filenames'])
    if "Filenames" in van:
        van_scans = ','.join(van["Filenames"])
    if "Background" in van:
        if "Filenames" in van['Background']:
            van_bg_scans = ','.join(van['Background']["Filenames"])

    # Output nexus filename
    nexus_filename = title + '.nxs'
    try:
        os.remove(nexus_filename)
    except OSError:
        pass

    # Get sample corrections
    sam_abs_corr = sample.get("AbsorptionCorrection", None)
    sam_ms_corr = sample.get("MultipleScatteringCorrection", None)
    sam_inelastic_corr = SetInelasticCorrection(
        sample.get('InelasticCorrection', None))

    # Warn about having absorption correction and multiple scat correction set
    if sam_abs_corr and sam_ms_corr:
        log.warning(MS_AND_ABS_CORR_WARNING)

    # Compute the absorption correction on the sample if it was provided
    sam_abs_ws = ''
    con_abs_ws = ''
    if sam_abs_corr:
        msg = "Applying '{}' absorption correction to sample"
        log.notice(msg.format(sam_abs_corr["Type"]))
        sam_abs_ws, con_abs_ws = create_absorption_wksp(
            sam_scans, sam_abs_corr["Type"], sam_geo_dict, sam_mat_dict,
            sam_env_dict, **config)

    # Get vanadium corrections
    van_mass_density = van.get('MassDensity', van_mass_density)
    van_packing_fraction = van.get('PackingFraction', van_packing_fraction)
    van_abs_corr = van.get("AbsorptionCorrection", {"Type": None})
    van_ms_corr = van.get("MultipleScatteringCorrection", {"Type": None})
    van_inelastic_corr = SetInelasticCorrection(
        van.get('InelasticCorrection', None))

    # Warn about having absorption correction and multiple scat correction set
    if van_abs_corr["Type"] and van_ms_corr["Type"]:
        log.warning(MS_AND_ABS_CORR_WARNING)

    # Compute the absorption correction for the vanadium if provided
    van_abs_corr_ws = ''
    if van_abs_corr:
        msg = "Applying '{}' absorption correction to vanadium"
        log.notice(msg.format(van_abs_corr["Type"]))
        van_abs_corr_ws, van_con_ws = create_absorption_wksp(
            van_scans, van_abs_corr["Type"], van_geo_dict, van_mat_dict,
            **config)

    alignAndFocusArgs = dict()
    alignAndFocusArgs['CalFilename'] = config['Calibration']['Filename']
    # alignAndFocusArgs['GroupFilename'] don't use
    # alignAndFocusArgs['Params'] = "0.,0.02,40."
    alignAndFocusArgs['ResampleX'] = -6000
    alignAndFocusArgs['Dspacing'] = False
    alignAndFocusArgs['PreserveEvents'] = False
    alignAndFocusArgs['MaxChunkSize'] = 8
    alignAndFocusArgs['CacheDir'] = os.path.abspath(cache_dir)

    # Get any additional AlignAndFocusArgs from JSON input
    if "AlignAndFocusArgs" in config:
        otherArgs = config["AlignAndFocusArgs"]
        alignAndFocusArgs.update(otherArgs)

    # Setup grouping
    output_grouping = False
    grp_wksp = "wksp_output_group"

    if grouping:
        if 'Initial' in grouping:
            if grouping['Initial'] and not grouping['Initial'] == u'':
                alignAndFocusArgs['GroupFilename'] = grouping['Initial']
        if 'Output' in grouping:
            if grouping['Output'] and not grouping['Output'] == u'':
                output_grouping = True
                LoadDetectorsGroupingFile(InputFile=grouping['Output'],
                                          OutputWorkspace=grp_wksp)
    # If no output grouping specified, create it with Calibration Grouping
    if not output_grouping:
        LoadDiffCal(alignAndFocusArgs['CalFilename'],
                    InstrumentName=instr,
                    WorkspaceName=grp_wksp.replace('_group', ''),
                    MakeGroupingWorkspace=True,
                    MakeCalWorkspace=False,
                    MakeMaskWorkspace=False)

    # Setup the 6 bank method if no grouping specified
    if not grouping:
        CreateGroupingWorkspace(InstrumentName=instr,
                                GroupDetectorsBy='Group',
                                OutputWorkspace=grp_wksp)
        alignAndFocusArgs['GroupingWorkspace'] = grp_wksp

    # TODO take out the RecalculatePCharge in the future once tested
    # Load Sample
    print("#-----------------------------------#")
    print("# Sample")
    print("#-----------------------------------#")
    sam_wksp = load('sample', sam_scans, sam_geometry, sam_material,
                    sam_mass_density, sam_abs_ws, **alignAndFocusArgs)
    sample_title = "sample_and_container"
    save_banks(InputWorkspace=sam_wksp,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    sam_molecular_mass = \
        mtd[sam_wksp].sample().getMaterial().relativeMolecularMass()
    natoms = getNumberAtoms(sam_packing_fraction,
                            sam_mass_density,
                            sam_molecular_mass,
                            Geometry=sam_geometry)

    # Load Sample Container
    print("#-----------------------------------#")
    print("# Sample Container")
    print("#-----------------------------------#")
    container = load('container',
                     container_scans,
                     absorption_wksp=con_abs_ws,
                     **alignAndFocusArgs)
    save_banks(InputWorkspace=container,
               Filename=nexus_filename,
               Title=container,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Load Sample Container Background

    if container_bg is not None:
        print("#-----------------------------------#")
        print("# Sample Container's Background")
        print("#-----------------------------------#")
        container_bg = load('container_background', container_bg,
                            **alignAndFocusArgs)
        save_banks(InputWorkspace=container_bg,
                   Filename=nexus_filename,
                   Title=container_bg,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # Load Vanadium

    print("#-----------------------------------#")
    print("# Vanadium")
    print("#-----------------------------------#")
    van_wksp = load('vanadium', van_scans, van_geometry, van_material,
                    van_mass_density, van_abs_corr_ws, **alignAndFocusArgs)
    vanadium_title = "vanadium_and_background"

    save_banks(InputWorkspace=van_wksp,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    van_material = mtd[van_wksp].sample().getMaterial()
    van_molecular_mass = van_material.relativeMolecularMass()
    nvan_atoms = getNumberAtoms(1.0,
                                van_mass_density,
                                van_molecular_mass,
                                Geometry=van_geometry)

    print("Sample natoms:", natoms)
    print("Vanadium natoms:", nvan_atoms)
    print("Vanadium natoms / Sample natoms:", nvan_atoms / natoms)

    # Load Vanadium Background
    van_bg = None
    if van_bg_scans is not None:
        print("#-----------------------------------#")
        print("# Vanadium Background")
        print("#-----------------------------------#")
        van_bg = load('vanadium_background', van_bg_scans, **alignAndFocusArgs)
        vanadium_bg_title = "vanadium_background"
        save_banks(InputWorkspace=van_bg,
                   Filename=nexus_filename,
                   Title=vanadium_bg_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # Load Instrument Characterizations
    if characterizations:
        PDDetermineCharacterizations(
            InputWorkspace=sam_wksp,
            Characterizations='characterizations',
            ReductionProperties='__snspowderreduction')
        propMan = PropertyManagerDataService.retrieve('__snspowderreduction')
        qmax = 2. * np.pi / propMan['d_min'].value
        qmin = 2. * np.pi / propMan['d_max'].value
        for a, b in zip(qmin, qmax):
            print('Qrange:', a, b)
        # TODO: Add when we apply Qmin, Qmax cropping
        # mask_info = generate_cropping_table(qmin, qmax)

    # STEP 1: Subtract Backgrounds

    sam_raw = 'sam_raw'
    CloneWorkspace(InputWorkspace=sam_wksp,
                   OutputWorkspace=sam_raw)  # for later

    container_raw = 'container_raw'
    CloneWorkspace(InputWorkspace=container,
                   OutputWorkspace=container_raw)  # for later

    if van_bg is not None:
        RebinToWorkspace(WorkspaceToRebin=van_bg,
                         WorkspaceToMatch=van_wksp,
                         OutputWorkspace=van_bg)
        Minus(LHSWorkspace=van_wksp,
              RHSWorkspace=van_bg,
              OutputWorkspace=van_wksp)

    RebinToWorkspace(WorkspaceToRebin=container,
                     WorkspaceToMatch=sam_wksp,
                     OutputWorkspace=container)
    Minus(LHSWorkspace=sam_wksp,
          RHSWorkspace=container,
          OutputWorkspace=sam_wksp)

    if container_bg is not None:
        RebinToWorkspace(WorkspaceToRebin=container_bg,
                         WorkspaceToMatch=container,
                         OutputWorkspace=container_bg)
        Minus(LHSWorkspace=container,
              RHSWorkspace=container_bg,
              OutputWorkspace=container)

    for wksp in [container, van_wksp, sam_wksp]:
        ConvertUnits(InputWorkspace=wksp,
                     OutputWorkspace=wksp,
                     Target="MomentumTransfer",
                     EMode="Elastic")
    container_title = "container_minus_back"
    vanadium_title = "vanadium_minus_back"
    sample_title = "sample_minus_back"
    save_banks(InputWorkspace=container,
               Filename=nexus_filename,
               Title=container_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)
    save_banks(InputWorkspace=van_wksp,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)
    save_banks(InputWorkspace=sam_wksp,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # STEP 2.0: Prepare vanadium as normalization calibrant

    # Multiple-Scattering and Absorption (Steps 2-4) for Vanadium

    van_corrected = 'van_corrected'
    ConvertUnits(InputWorkspace=van_wksp,
                 OutputWorkspace=van_corrected,
                 Target="Wavelength",
                 EMode="Elastic")

    if "Type" in van_abs_corr:
        if van_abs_corr['Type'] == 'Carpenter' \
                or van_ms_corr['Type'] == 'Carpenter':
            CarpenterSampleCorrection(
                InputWorkspace=van_corrected,
                OutputWorkspace=van_corrected,
                CylinderSampleRadius=van['Geometry']['Radius'])
        elif van_abs_corr['Type'] == 'Mayers' \
                or van_ms_corr['Type'] == 'Mayers':
            if van_ms_corr['Type'] == 'Mayers':
                MayersSampleCorrection(InputWorkspace=van_corrected,
                                       OutputWorkspace=van_corrected,
                                       MultipleScattering=True)
            else:
                MayersSampleCorrection(InputWorkspace=van_corrected,
                                       OutputWorkspace=van_corrected,
                                       MultipleScattering=False)
        else:
            print("NO VANADIUM absorption or multiple scattering!")
    else:
        CloneWorkspace(InputWorkspace=van_corrected,
                       OutputWorkspace=van_corrected)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')
    vanadium_title += "_ms_abs_corrected"
    save_banks(InputWorkspace=van_corrected,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)
    save_banks(InputWorkspace=van_corrected,
               Filename=nexus_filename,
               Title=vanadium_title + "_with_peaks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # TODO subtract self-scattering of vanadium (According to Eq. 7 of Howe,
    # McGreevy, and Howells, JPCM, 1989)

    # Smooth Vanadium (strip peaks plus smooth)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='dSpacing',
                 EMode='Elastic')

    # After StripVanadiumPeaks, the workspace goes from EventWorkspace ->
    # Workspace2D
    StripVanadiumPeaks(InputWorkspace=van_corrected,
                       OutputWorkspace=van_corrected,
                       BackgroundType='Quadratic')
    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')
    vanadium_title += '_peaks_stripped'
    save_banks(InputWorkspace=van_corrected,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='TOF',
                 EMode='Elastic')

    FFTSmooth(InputWorkspace=van_corrected,
              OutputWorkspace=van_corrected,
              Filter="Butterworth",
              Params='20,2',
              IgnoreXBins=True,
              AllSpectra=True)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')

    vanadium_title += '_smoothed'
    save_banks(InputWorkspace=van_corrected,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Inelastic correction
    if van_inelastic_corr['Type'] == "Placzek":
        van_scan = van['Runs'][0]
        van_incident_wksp = 'van_incident_wksp'
        van_inelastic_opts = van['InelasticCorrection']
        lambda_binning_fit = van_inelastic_opts['LambdaBinningForFit']
        lambda_binning_calc = van_inelastic_opts['LambdaBinningForCalc']
        print('van_scan:', van_scan)
        GetIncidentSpectrumFromMonitor(Filename=facility_file_format %
                                       (instr, van_scan),
                                       OutputWorkspace=van_incident_wksp)

        fit_type = van['InelasticCorrection']['FitSpectrumWith']
        FitIncidentSpectrum(InputWorkspace=van_incident_wksp,
                            OutputWorkspace=van_incident_wksp,
                            FitSpectrumWith=fit_type,
                            BinningForFit=lambda_binning_fit,
                            BinningForCalc=lambda_binning_calc,
                            PlotDiagnostics=False)

        van_placzek = 'van_placzek'

        SetSample(InputWorkspace=van_incident_wksp,
                  Material={
                      'ChemicalFormula': str(van_material),
                      'SampleMassDensity': str(van_mass_density)
                  })

        CalculatePlaczekSelfScattering(IncidentWorkspace=van_incident_wksp,
                                       ParentWorkspace=van_corrected,
                                       OutputWorkspace=van_placzek,
                                       L1=19.5,
                                       L2=alignAndFocusArgs['L2'],
                                       Polar=alignAndFocusArgs['Polar'])

        ConvertToHistogram(InputWorkspace=van_placzek,
                           OutputWorkspace=van_placzek)

        # Save before rebin in Q
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

            Rebin(InputWorkspace=wksp,
                  OutputWorkspace=wksp,
                  Params=binning,
                  PreserveEvents=True)

        save_banks(InputWorkspace=van_placzek,
                   Filename=nexus_filename,
                   Title="vanadium_placzek",
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

        # Rebin in Wavelength
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='Wavelength',
                         EMode='Elastic')
            Rebin(InputWorkspace=wksp,
                  OutputWorkspace=wksp,
                  Params=lambda_binning_calc,
                  PreserveEvents=True)

        # Save after rebin in Q
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

        # Subtract correction in Wavelength
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='Wavelength',
                         EMode='Elastic')
            if not mtd[wksp].isDistribution():
                ConvertToDistribution(wksp)

        Minus(LHSWorkspace=van_corrected,
              RHSWorkspace=van_placzek,
              OutputWorkspace=van_corrected)

        # Save after subtraction
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

        vanadium_title += '_placzek_corrected'
        save_banks(InputWorkspace=van_corrected,
                   Filename=nexus_filename,
                   Title=vanadium_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')

    SetUncertainties(InputWorkspace=van_corrected,
                     OutputWorkspace=van_corrected,
                     SetError='zero')

    # STEP 2.1: Normalize by Vanadium

    wksp_list = [sam_wksp, sam_raw, van_corrected]
    for name in wksp_list:
        ConvertUnits(InputWorkspace=name,
                     OutputWorkspace=name,
                     Target='MomentumTransfer',
                     EMode='Elastic',
                     ConvertFromPointData=False)

        Rebin(InputWorkspace=name,
              OutputWorkspace=name,
              Params=binning,
              PreserveEvents=True)

    # Save the sample - back / normalized
    Divide(LHSWorkspace=sam_wksp,
           RHSWorkspace=van_corrected,
           OutputWorkspace=sam_wksp)

    sample_title += "_normalized"
    save_banks(InputWorkspace=sam_wksp,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Save the sample / normalized (ie no background subtraction)
    Divide(LHSWorkspace=sam_raw,
           RHSWorkspace=van_corrected,
           OutputWorkspace=sam_raw)

    save_banks(InputWorkspace=sam_raw,
               Filename=nexus_filename,
               Title="sample_normalized",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Output an initial I(Q) for sample
    iq_filename = title + '_initial_iofq_banks.nxs'
    save_banks(InputWorkspace=sam_wksp,
               Filename=iq_filename,
               Title="IQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    wksp_list = [container, container_raw, van_corrected]
    if container_bg is not None:
        wksp_list.append(container_bg)
    if van_bg is not None:
        wksp_list.append(van_bg)

    for name in wksp_list:
        ConvertUnits(InputWorkspace=name,
                     OutputWorkspace=name,
                     Target='MomentumTransfer',
                     EMode='Elastic',
                     ConvertFromPointData=False)

        Rebin(InputWorkspace=name,
              OutputWorkspace=name,
              Params=binning,
              PreserveEvents=True)

    # Save the container - container_background / normalized
    Divide(LHSWorkspace=container,
           RHSWorkspace=van_corrected,
           OutputWorkspace=container)

    container_title += '_normalized'
    save_banks(InputWorkspace=container,
               Filename=nexus_filename,
               Title=container_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Save the container / normalized (ie no background subtraction)
    Divide(LHSWorkspace=container_raw,
           RHSWorkspace=van_corrected,
           OutputWorkspace=container_raw)

    save_banks(InputWorkspace=container_raw,
               Filename=nexus_filename,
               Title="container_normalized",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Save the container_background / normalized
    if container_bg is not None:
        Divide(LHSWorkspace=container_bg,
               RHSWorkspace=van_corrected,
               OutputWorkspace=container_bg)

        container_bg_title = "container_back_normalized"
        save_banks(InputWorkspace=container_bg,
                   Filename=nexus_filename,
                   Title=container_bg_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # Save the vanadium_background / normalized
    if van_bg is not None:
        Divide(LHSWorkspace=van_bg,
               RHSWorkspace=van_corrected,
               OutputWorkspace=van_bg)

        vanadium_bg_title += "_normalized"
        save_banks(InputWorkspace=van_bg,
                   Filename=nexus_filename,
                   Title=vanadium_bg_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # STEP 3 & 4: Subtract multiple scattering and apply absorption correction

    ConvertUnits(InputWorkspace=sam_wksp,
                 OutputWorkspace=sam_wksp,
                 Target="Wavelength",
                 EMode="Elastic")

    sam_corrected = 'sam_corrected'
    if sam_abs_corr and sam_ms_corr:
        if sam_abs_corr['Type'] == 'Carpenter' \
                or sam_ms_corr['Type'] == 'Carpenter':
            CarpenterSampleCorrection(
                InputWorkspace=sam_wksp,
                OutputWorkspace=sam_corrected,
                CylinderSampleRadius=sample['Geometry']['Radius'])
        elif sam_abs_corr['Type'] == 'Mayers' \
                or sam_ms_corr['Type'] == 'Mayers':
            if sam_ms_corr['Type'] == 'Mayers':
                MayersSampleCorrection(InputWorkspace=sam_wksp,
                                       OutputWorkspace=sam_corrected,
                                       MultipleScattering=True)
            else:
                MayersSampleCorrection(InputWorkspace=sam_wksp,
                                       OutputWorkspace=sam_corrected,
                                       MultipleScattering=False)
        else:
            print("NO SAMPLE absorption or multiple scattering!")
            CloneWorkspace(InputWorkspace=sam_wksp,
                           OutputWorkspace=sam_corrected)

        ConvertUnits(InputWorkspace=sam_corrected,
                     OutputWorkspace=sam_corrected,
                     Target='MomentumTransfer',
                     EMode='Elastic')

        sample_title += "_ms_abs_corrected"
        save_banks(InputWorkspace=sam_corrected,
                   Filename=nexus_filename,
                   Title=sample_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)
    else:
        CloneWorkspace(InputWorkspace=sam_wksp, OutputWorkspace=sam_corrected)

    # STEP 5: Divide by number of atoms in sample

    mtd[sam_corrected] = (nvan_atoms / natoms) * mtd[sam_corrected]
    ConvertUnits(InputWorkspace=sam_corrected,
                 OutputWorkspace=sam_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')

    sample_title += "_norm_by_atoms"
    save_banks(InputWorkspace=sam_corrected,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # STEP 6: Divide by total scattering length squared = total scattering
    # cross-section over 4 * pi
    van_material = mtd[van_corrected].sample().getMaterial()
    sigma_v = van_material.totalScatterXSection()
    prefactor = (sigma_v / (4. * np.pi))
    msg = "Total scattering cross-section of Vanadium:{} sigma_v / 4*pi: {}"
    print(msg.format(sigma_v, prefactor))

    mtd[sam_corrected] = prefactor * mtd[sam_corrected]
    sample_title += '_multiply_by_vanSelfScat'
    save_banks(InputWorkspace=sam_corrected,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # STEP 7: Inelastic correction
    ConvertUnits(InputWorkspace=sam_corrected,
                 OutputWorkspace=sam_corrected,
                 Target='Wavelength',
                 EMode='Elastic')

    if sam_inelastic_corr['Type'] == "Placzek":
        if sam_material is None:
            error = "For Placzek correction, must specify a sample material."
            raise Exception(error)
        for sam_scan in sample['Runs']:
            sam_incident_wksp = 'sam_incident_wksp'
            sam_inelastic_opts = sample['InelasticCorrection']
            lambda_binning_fit = sam_inelastic_opts['LambdaBinningForFit']
            lambda_binning_calc = sam_inelastic_opts['LambdaBinningForCalc']
            GetIncidentSpectrumFromMonitor(Filename=facility_file_format %
                                           (instr, sam_scan),
                                           OutputWorkspace=sam_incident_wksp)

            fit_type = sample['InelasticCorrection']['FitSpectrumWith']
            FitIncidentSpectrum(InputWorkspace=sam_incident_wksp,
                                OutputWorkspace=sam_incident_wksp,
                                FitSpectrumWith=fit_type,
                                BinningForFit=lambda_binning_fit,
                                BinningForCalc=lambda_binning_calc)

            sam_placzek = 'sam_placzek'
            SetSample(InputWorkspace=sam_incident_wksp,
                      Material={
                          'ChemicalFormula': str(sam_material),
                          'SampleMassDensity': str(sam_mass_density)
                      })
            CalculatePlaczekSelfScattering(IncidentWorkspace=sam_incident_wksp,
                                           ParentWorkspace=sam_corrected,
                                           OutputWorkspace=sam_placzek,
                                           L1=19.5,
                                           L2=alignAndFocusArgs['L2'],
                                           Polar=alignAndFocusArgs['Polar'])

            ConvertToHistogram(InputWorkspace=sam_placzek,
                               OutputWorkspace=sam_placzek)

        # Save before rebin in Q
        for wksp in [sam_placzek, sam_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

            Rebin(InputWorkspace=wksp,
                  OutputWorkspace=wksp,
                  Params=binning,
                  PreserveEvents=True)

        save_banks(InputWorkspace=sam_placzek,
                   Filename=nexus_filename,
                   Title="sample_placzek",
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

        # Save after rebin in Q
        for wksp in [sam_placzek, sam_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

        Minus(LHSWorkspace=sam_corrected,
              RHSWorkspace=sam_placzek,
              OutputWorkspace=sam_corrected)

        # Save after subtraction
        for wksp in [sam_placzek, sam_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

        sample_title += '_placzek_corrected'
        save_banks(InputWorkspace=sam_corrected,
                   Filename=nexus_filename,
                   Title=sample_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # STEP 8: Output spectrum

    # TODO Since we already went from Event -> 2D workspace, can't use this
    # anymore
    print('sam:', mtd[sam_corrected].id())
    print('van:', mtd[van_corrected].id())
    if alignAndFocusArgs['PreserveEvents']:
        CompressEvents(InputWorkspace=sam_corrected,
                       OutputWorkspace=sam_corrected)

    # F(Q) bank-by-bank Section
    fq_banks_wksp = "FQ_banks_wksp"
    CloneWorkspace(InputWorkspace=sam_corrected, OutputWorkspace=fq_banks_wksp)
    # TODO: Add the following when implemented - FQ_banks = 'FQ_banks'

    # S(Q) bank-by-bank Section
    material = mtd[sam_corrected].sample().getMaterial()
    if material.name() is None or len(material.name().strip()) == 0:
        raise RuntimeError('Sample material was not set')
    bcoh_avg_sqrd = material.cohScatterLength() * material.cohScatterLength()
    btot_sqrd_avg = material.totalScatterLengthSqrd()
    laue_monotonic_diffuse_scat = btot_sqrd_avg / bcoh_avg_sqrd
    sq_banks_wksp = 'SQ_banks_wksp'
    CloneWorkspace(InputWorkspace=sam_corrected, OutputWorkspace=sq_banks_wksp)

    # TODO: Add the following when implemented
    '''
    SQ_banks = (1. / bcoh_avg_sqrd) * \
        mtd[sq_banks_wksp] - laue_monotonic_diffuse_scat + 1.
    '''

    # Save S(Q) and F(Q) to diagnostics NeXus file
    save_banks(InputWorkspace=fq_banks_wksp,
               Filename=nexus_filename,
               Title="FQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    save_banks(InputWorkspace=sq_banks_wksp,
               Filename=nexus_filename,
               Title="SQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Output a main S(Q) and F(Q) file
    fq_filename = title + '_fofq_banks_corrected.nxs'
    save_banks(InputWorkspace=fq_banks_wksp,
               Filename=fq_filename,
               Title="FQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    sq_filename = title + '_sofq_banks_corrected.nxs'
    save_banks(InputWorkspace=sq_banks_wksp,
               Filename=sq_filename,
               Title="SQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Print log information
    print("<b>^2:", bcoh_avg_sqrd)
    print("<b^2>:", btot_sqrd_avg)
    print("Laue term:", laue_monotonic_diffuse_scat)
    print("sample total xsection:",
          mtd[sam_corrected].sample().getMaterial().totalScatterXSection())
    print("vanadium total xsection:",
          mtd[van_corrected].sample().getMaterial().totalScatterXSection())

    # Output Bragg Diffraction
    ConvertUnits(InputWorkspace=sam_corrected,
                 OutputWorkspace=sam_corrected,
                 Target="TOF",
                 EMode="Elastic")

    ConvertToHistogram(InputWorkspace=sam_corrected,
                       OutputWorkspace=sam_corrected)

    xmin, xmax = get_each_spectra_xmin_xmax(mtd[sam_corrected])

    CropWorkspaceRagged(InputWorkspace=sam_corrected,
                        OutputWorkspace=sam_corrected,
                        Xmin=xmin,
                        Xmax=xmax)

    xmin_rebin = min(xmin)
    xmax_rebin = max(xmax)
    tof_binning = "{xmin},-0.01,{xmax}".format(xmin=xmin_rebin,
                                               xmax=xmax_rebin)

    Rebin(InputWorkspace=sam_corrected,
          OutputWorkspace=sam_corrected,
          Params=tof_binning)

    SaveGSS(InputWorkspace=sam_corrected,
            Filename=os.path.join(os.path.abspath(OutputDir), title + ".gsa"),
            SplitFiles=False,
            Append=False,
            MultiplyByBinWidth=True,
            Format="SLOG",
            ExtendedHeader=True)

    return mtd[sam_corrected]
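
For orientation, here is a rough sketch of the config dictionary implied by the keys TotalScatteringReduction reads above (get_sample and get_normalization are assumed to return the 'Sample' and 'Normalization' blocks). Every value is a placeholder rather than a tested default, optional blocks are marked in comments, and the Placzek branches additionally expect 'L2' and 'Polar' entries inside 'AlignAndFocusArgs'.

# Placeholder values only; structure inferred from the lookups in the function above.
example_config = {
    'Facility': 'SNS',            # the code only distinguishes 'SNS' from everything else
    'Title': 'my_total_scattering_run',
    'Instrument': 'NOM',          # illustrative instrument prefix
    'Sample': {
        'Runs': '45678',          # whatever expand_ints() accepts
        'Material': 'Si',
        'MassDensity': 2.33,
        'PackingFraction': 0.6,
        'Geometry': {'Radius': 0.3, 'Height': 1.8},
        'Background': {'Runs': '45679'},   # may also carry 'Background' and 'Filenames'
        'AbsorptionCorrection': {'Type': 'Mayers'},   # the code branches on 'Carpenter'/'Mayers'
        'InelasticCorrection': {'Type': 'Placzek',
                                'LambdaBinningForFit': '0.1,0.05,3.0',
                                'LambdaBinningForCalc': '0.1,0.0001,3.0',
                                'FitSpectrumWith': 'CubicSpline'},  # placeholder fit type
    },
    'Normalization': {
        'Runs': '45680',
        'Material': 'V',
        'MassDensity': 6.11,
        'PackingFraction': 1.0,
        'Geometry': {'Radius': 0.3, 'Height': 1.8},
        'Background': {'Runs': '45681'},   # optional
    },
    'Environment': {'Name': 'InAir', 'Container': 'PAC06'},   # optional; these are the defaults used above
    'Calibration': {'Filename': '/path/to/calibration.h5'},
    'Merging': {'QBinning': [0.0, 0.02, 40.0]},   # may also carry 'Characterizations' and 'Grouping'
    'CacheDir': '/tmp/cache',     # optional
    'OutputDir': '/tmp/output',   # optional
    'AlignAndFocusArgs': {},      # optional; the Placzek correction also reads 'L2' and 'Polar' here
}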
Example #15
    def PyExec(self):
        in_Runs = self.getProperty("RunNumbers").value
        progress = Progress(self, 0., .25, 3)
        finalUnits = self.getPropertyValue("FinalUnits")
        self.chunkSize = self.getProperty('MaxChunkSize').value

        # default arguments for AlignAndFocusPowder
        self.alignAndFocusArgs = {'Tmin': 0,
                                  'TMax': 50000,
                                  'RemovePromptPulseWidth': 1600,
                                  'PreserveEvents': False,
                                  'Dspacing': True,  # binning parameters in d-space
                                  'Params': self.getProperty("Binning").value,
                                  }

        # workspace for loading metadata only to be used in LoadDiffCal and
        # CreateGroupingWorkspace
        metaWS = None

        # either type of file-based calibration is stored in the same variable
        calib = self.getProperty("Calibration").value
        detcalFile = None
        if calib == "Calibration File":
            metaWS = self._loadMetaWS(in_Runs[0])
            LoadDiffCal(Filename=self.getPropertyValue("CalibrationFilename"),
                        WorkspaceName='SNAP',
                        InputWorkspace=metaWS,
                        MakeGroupingWorkspace=False, MakeMaskWorkspace=False)
            self.alignAndFocusArgs['CalibrationWorkspace'] = 'SNAP_cal'
        elif calib == 'DetCal File':
            detcalFile = ','.join(self.getProperty('DetCalFilename').value)
        progress.report('loaded calibration')

        norm = self.getProperty("Normalization").value

        if norm == "From Processed Nexus":
            norm_File = self.getProperty("NormalizationFilename").value
            normalizationWS = 'normWS'
            LoadNexusProcessed(Filename=norm_File, OutputWorkspace=normalizationWS)
            progress.report('loaded normalization')
        elif norm == "From Workspace":
            normalizationWS = str(self.getProperty("NormalizationWorkspace").value)
            progress.report('')
        else:
            normalizationWS = None
            progress.report('')

        self.alignAndFocusArgs['GroupingWorkspace'] = self._generateGrouping(in_Runs[0], metaWS, progress)
        self.alignAndFocusArgs['MaskWorkspace'] = self._getMaskWSname(in_Runs[0], metaWS)  # can be empty string

        if metaWS is not None:
            DeleteWorkspace(Workspace=metaWS)

        Process_Mode = self.getProperty("ProcessingMode").value

        prefix = self.getProperty("OptionalPrefix").value

        Tag = 'SNAP'
        progStart = .25
        progDelta = (1.-progStart)/len(in_Runs)

        # --------------------------- PROCESS BACKGROUND ----------------------
        if not self.getProperty('Background').isDefault:
            progDelta = (1. - progStart) / (len(in_Runs) + 1)  # redefine to account for background

            background = 'SNAP_{}'.format(self.getProperty('Background').value)
            self.log().notice("processing run background {}".format(background))
            background, unfocussedBkgd = self._alignAndFocus(background,
                                                             background+'_bkgd_red',
                                                             detCalFilename=detcalFile,
                                                             withUnfocussed=(Process_Mode == 'Set-Up'),
                                                             progStart=progStart, progDelta=progDelta)
        else:
            background = None
            unfocussedBkgd = ''

        # --------------------------- REDUCE DATA -----------------------------

        for i, runnumber in enumerate(in_Runs):
            self.log().notice("processing run %s" % runnumber)

            # put together output names
            new_Tag = Tag
            if len(prefix) > 0:
                new_Tag = prefix + '_' + new_Tag
            basename = '%s_%s_%s' % (new_Tag, runnumber, self.alignAndFocusArgs['GroupingWorkspace'])
            self.log().warning('{}:{}:{}'.format(i, new_Tag, basename))
            redWS, unfocussedWksp = self._alignAndFocus('SNAP_{}'.format(runnumber),
                                                        basename + '_red',
                                                        detCalFilename=detcalFile,
                                                        withUnfocussed=(Process_Mode == 'Set-Up'),
                                                        progStart=progStart, progDelta=progDelta*.5)
            progStart += .5 * progDelta

            # subtract the background if it was supplied
            if background:
                self.log().information('subtracting {} from {}'.format(background, redWS))
                Minus(LHSWorkspace=redWS, RHSWorkspace=background, OutputWorkspace=redWS)
                # intentionally don't subtract the unfocussed workspace since it hasn't been normalized by counting time

            # the remaining steps take up a quarter of this run's share of the progress bar
            progress = Progress(self, progStart, progStart+.25*progDelta, 2)

            # AlignAndFocusPowder leaves the data in time-of-flight
            ConvertUnits(InputWorkspace=redWS, OutputWorkspace=redWS, Target='dSpacing', EMode='Elastic')

            # Edit instrument geometry to make final workspace smaller on disk
            det_table = PreprocessDetectorsToMD(InputWorkspace=redWS,
                                                OutputWorkspace='__SNAP_det_table')
            polar = np.degrees(det_table.column('TwoTheta'))
            azi = np.degrees(det_table.column('Azimuthal'))
            EditInstrumentGeometry(Workspace=redWS, L2=det_table.column('L2'),
                                   Polar=polar, Azimuthal=azi)
            mtd.remove('__SNAP_det_table')
            progress.report('simplify geometry')

            # AlignAndFocus doesn't necessarily rebin the data correctly
            if Process_Mode == "Set-Up":
                Rebin(InputWorkspace=unfocussedWksp, Params=self.alignAndFocusArgs['Params'],
                      OutputWorkspace=unfocussedWksp)
                if background:
                    Rebin(InputWorkspace=unfocussedBkgd, Params=self.alignAndFocusArgs['Params'],
                          OutputWorkspace=unfocussedBkgd)
            # normalize the data as requested
            normalizationWS = self._generateNormalization(redWS, norm, normalizationWS)
            normalizedWS = None
            if normalizationWS is not None:
                normalizedWS = basename + '_nor'
                Divide(LHSWorkspace=redWS, RHSWorkspace=normalizationWS,
                       OutputWorkspace=normalizedWS)
                ReplaceSpecialValues(InputWorkspace=normalizedWS,
                                     OutputWorkspace=normalizedWS,
                                     NaNValue='0', NaNError='0',
                                     InfinityValue='0', InfinityError='0')
                progress.report('normalized')
            else:
                progress.report()

            # rename everything as appropriate and determine output workspace name
            if normalizedWS is None:
                outputWksp = redWS
            else:
                outputWksp = normalizedWS

                if norm == "Extracted from Data" and Process_Mode == "Production":
                        DeleteWorkspace(Workspace=redWS)
                        DeleteWorkspace(Workspace=normalizationWS)

            # Save requested formats - function checks that saving is requested
            self._save(runnumber, basename, outputWksp)

            # set workspace as an output so it gets history
            ConvertUnits(InputWorkspace=str(outputWksp), OutputWorkspace=str(outputWksp), Target=finalUnits,
                         EMode='Elastic')
            self._exportWorkspace('OutputWorkspace_' + str(outputWksp), outputWksp)

            # declare some things as extra outputs in set-up
            if Process_Mode != "Production":
                propprefix = 'OutputWorkspace_{:d}_'.format(i)
                propNames = [propprefix + it for it in ['d', 'norm', 'normalizer']]
                wkspNames = ['%s_%s_d' % (new_Tag, runnumber),
                             basename + '_red',
                             '%s_%s_normalizer' % (new_Tag, runnumber)]
                for (propName, wkspName) in zip(propNames, wkspNames):
                    self._exportWorkspace(propName, wkspName)

        if background:
            ConvertUnits(InputWorkspace=str(background), OutputWorkspace=str(background), Target=finalUnits,
                         EMode='Elastic')
            prefix = 'OutputWorkspace_{}'.format(len(in_Runs))
            propNames = [prefix + it for it in ['', '_d']]
            wkspNames = [background, unfocussedBkgd]
            for (propName, wkspName) in zip(propNames, wkspNames):
                self._exportWorkspace(propName, wkspName)
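
A minimal, self-contained sketch of the subtract-then-normalise pattern used in the reduction loop above (Minus, then Divide, then ReplaceSpecialValues to clean up divide-by-zero bins). It assumes a working Mantid installation and runs on synthetic workspaces from CreateSampleWorkspace; the workspace names 'red', 'bkgd' and 'normalizer' are illustrative, not names used by the algorithm itself.

from mantid.simpleapi import (CreateSampleWorkspace, Minus, Divide,
                              ReplaceSpecialValues, DeleteWorkspaces)

# stand-ins for the reduced run, the reduced background and the normalization workspace
CreateSampleWorkspace(OutputWorkspace='red')
CreateSampleWorkspace(OutputWorkspace='bkgd')
CreateSampleWorkspace(OutputWorkspace='normalizer')

# subtract the (already focused) background from the reduced data
Minus(LHSWorkspace='red', RHSWorkspace='bkgd', OutputWorkspace='red')

# normalise, then zero out any NaN/inf bins created where the normalizer is zero
Divide(LHSWorkspace='red', RHSWorkspace='normalizer', OutputWorkspace='red_nor')
ReplaceSpecialValues(InputWorkspace='red_nor', OutputWorkspace='red_nor',
                     NaNValue=0, NaNError=0, InfinityValue=0, InfinityError=0)

DeleteWorkspaces(WorkspaceList=['bkgd', 'normalizer'])
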
Example #16
0
    def PyExec(self):
        data = self.getProperty("InputWorkspace").value  # [1~n]
        bkg = self.getProperty("BackgroundWorkspace").value  # [1~n]
        cal = self.getProperty("CalibrationWorkspace").value  # [1]
        xMin = self.getProperty("XMin").value
        xMax = self.getProperty("XMax").value
        numberBins = self.getProperty("NumberBins").value
        outWS = self.getPropertyValue("OutputWorkspace")

        # NOTE:
        # StringArrayProperty cannot be optional, so the background can only be passed in as a string
        # or a list, which will be manually unpacked here
        if bkg != "":
            bkg = [
                AnalysisDataService.retrieve(me)
                for me in map(str.strip, bkg.split(","))
            ]

        # NOTE:
        # xMin and xMax are initialized as empty numpy arrays (np.array([])),
        # so fall back to the global x-limits when the user did not supply them.
        _xMin, _xMax = self._locate_global_xlimit()
        xMin = _xMin if xMin.size == 0 else xMin
        xMax = _xMax if xMax.size == 0 else xMax

        # BEGIN_FOR: process_spectra
        for n, _wsn in enumerate(data):
            _mskn = f"__mask_{n}"  # calculated in previous loop
            _ws = AnalysisDataService.retrieve(_wsn)

            # resample spectra
            _ws_resampled = ResampleX(
                InputWorkspace=f"__ws_{n}",
                XMin=xMin,
                XMax=xMax,
                NumberBins=numberBins,
                EnableLogging=False,
            )

            # calibration
            if cal is not None:
                _ws_cal_resampled = self._resample_calibration(_ws, _mskn, xMin, xMax)
                _ws_resampled = Divide(
                    LHSWorkspace=_ws_resampled,
                    RHSWorkspace=_ws_cal_resampled,
                    EnableLogging=False,
                )
            else:
                _ws_cal_resampled = None

            _ws_resampled = Scale(
                InputWorkspace=_ws_resampled,
                Factor=self._get_scale(cal) / self._get_scale(_ws),
                EnableLogging=False,
            )

            # background
            if bkg != "":
                bgn = bkg[n] if isinstance(bkg, list) else bkg

                _ws_bkg_resampled = self._resample_background(
                    bgn, _ws, _mskn, xMin, xMax, _ws_cal_resampled
                )

                _ws_resampled = Minus(
                    LHSWorkspace=_ws_resampled,
                    RHSWorkspace=_ws_bkg_resampled,
                    EnableLogging=False,
                )

            # conjoin
            if n < 1:
                CloneWorkspace(
                    InputWorkspace=_ws_resampled,
                    OutputWorkspace="__ws_conjoined",
                    EnableLogging=False,
                )
            else:
                ConjoinWorkspaces(
                    InputWorkspace1="__ws_conjoined",
                    InputWorkspace2=_ws_resampled,
                    CheckOverlapping=False,
                    EnableLogging=False,
                )
        # END_FOR: process_spectra

        # Step_3: sum all spectra
        # ref: https://docs.mantidproject.org/nightly/algorithms/SumSpectra-v1.html
        if cal is not None:
            SumSpectra(
                InputWorkspace="__ws_conjoined",
                OutputWorkspace=outWS,
                WeightedSum=True,
                MultiplyBySpectra=False,
                EnableLogging=False,
            )
        else:
            SumSpectra(
                InputWorkspace="__ws_conjoined",
                OutputWorkspace=outWS,
                WeightedSum=True,
                MultiplyBySpectra=True,
                EnableLogging=False,
            )

        self.setProperty("OutputWorkspace", outWS)

        # Step_4: remove temp workspaces
        for ws in self.temp_workspace_list:
            if mtd.doesExist(ws):
                DeleteWorkspace(ws, EnableLogging=False)
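
A minimal sketch of the per-workspace chain above, assuming Mantid is available: resample everything onto a common grid with ResampleX, divide by the resampled calibration, subtract the resampled background, then sum the spectra with a weighted sum. The workspace names and the bin count of 500 are illustrative only.

from mantid.simpleapi import (CreateSampleWorkspace, ResampleX, Divide, Minus,
                              SumSpectra)

for name in ('data', 'cal', 'bkg'):
    CreateSampleWorkspace(OutputWorkspace=name, NumBanks=1, BankPixelWidth=2)
    # without XMin/XMax, ResampleX uses each workspace's own data range
    ResampleX(InputWorkspace=name, OutputWorkspace=name, NumberBins=500)

Divide(LHSWorkspace='data', RHSWorkspace='cal', OutputWorkspace='data')   # calibration
Minus(LHSWorkspace='data', RHSWorkspace='bkg', OutputWorkspace='data')    # background
SumSpectra(InputWorkspace='data', OutputWorkspace='data_summed', WeightedSum=True)
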
Example #17
0
    def PyExec(self):
        data = self.getProperty("InputWorkspace").value
        cal = self.getProperty("CalibrationWorkspace").value
        bkg = self.getProperty("BackgroundWorkspace").value
        mask = self.getProperty("MaskWorkspace").value
        target = self.getProperty("Target").value
        eFixed = self.getProperty("EFixed").value
        xMin = self.getProperty("XMin").value
        xMax = self.getProperty("XMax").value
        numberBins = self.getProperty("NumberBins").value
        normaliseBy = self.getProperty("NormaliseBy").value
        maskAngle = self.getProperty("MaskAngle").value
        outWS = self.getPropertyValue("OutputWorkspace")

        data_scale = 1
        cal_scale = 1
        bkg_scale = 1

        if normaliseBy == "Monitor":
            data_scale = data.run().getProtonCharge()
        elif normaliseBy == "Time":
            data_scale = data.run().getLogData('duration').value

        ExtractMask(data, OutputWorkspace='__mask_tmp', EnableLogging=False)

        if maskAngle != Property.EMPTY_DBL:
            MaskAngle(Workspace='__mask_tmp',
                      MinAngle=maskAngle,
                      Angle='Phi',
                      EnableLogging=False)

        if mask is not None:
            BinaryOperateMasks(InputWorkspace1='__mask_tmp',
                               InputWorkspace2=mask,
                               OperationType='OR',
                               OutputWorkspace='__mask_tmp',
                               EnableLogging=False)

        ExtractUnmaskedSpectra(InputWorkspace=data,
                               MaskWorkspace='__mask_tmp',
                               OutputWorkspace='__data_tmp',
                               EnableLogging=False)
        if isinstance(mtd['__data_tmp'], IEventWorkspace):
            Integration(InputWorkspace='__data_tmp',
                        OutputWorkspace='__data_tmp',
                        EnableLogging=False)
        ConvertSpectrumAxis(InputWorkspace='__data_tmp',
                            Target=target,
                            EFixed=eFixed,
                            OutputWorkspace=outWS,
                            EnableLogging=False)
        Transpose(InputWorkspace=outWS,
                  OutputWorkspace=outWS,
                  EnableLogging=False)
        ResampleX(InputWorkspace=outWS,
                  OutputWorkspace=outWS,
                  XMin=xMin,
                  XMax=xMax,
                  NumberBins=numberBins,
                  EnableLogging=False)

        if cal is not None:
            ExtractUnmaskedSpectra(InputWorkspace=cal,
                                   MaskWorkspace='__mask_tmp',
                                   OutputWorkspace='__cal_tmp',
                                   EnableLogging=False)
            if isinstance(mtd['__cal_tmp'], IEventWorkspace):
                Integration(InputWorkspace='__cal_tmp',
                            OutputWorkspace='__cal_tmp',
                            EnableLogging=False)
            CopyInstrumentParameters(data, '__cal_tmp', EnableLogging=False)
            ConvertSpectrumAxis(InputWorkspace='__cal_tmp',
                                Target=target,
                                EFixed=eFixed,
                                OutputWorkspace='__cal_tmp',
                                EnableLogging=False)
            Transpose(InputWorkspace='__cal_tmp',
                      OutputWorkspace='__cal_tmp',
                      EnableLogging=False)
            ResampleX(InputWorkspace='__cal_tmp',
                      OutputWorkspace='__cal_tmp',
                      XMin=xMin,
                      XMax=xMax,
                      NumberBins=numberBins,
                      EnableLogging=False)
            Divide(LHSWorkspace=outWS,
                   RHSWorkspace='__cal_tmp',
                   OutputWorkspace=outWS,
                   EnableLogging=False)
            if normaliseBy == "Monitor":
                cal_scale = cal.run().getProtonCharge()
            elif normaliseBy == "Time":
                cal_scale = cal.run().getLogData('duration').value

        Scale(InputWorkspace=outWS,
              OutputWorkspace=outWS,
              Factor=cal_scale / data_scale,
              EnableLogging=False)

        if bkg is not None:
            ExtractUnmaskedSpectra(InputWorkspace=bkg,
                                   MaskWorkspace='__mask_tmp',
                                   OutputWorkspace='__bkg_tmp',
                                   EnableLogging=False)
            if isinstance(mtd['__bkg_tmp'], IEventWorkspace):
                Integration(InputWorkspace='__bkg_tmp',
                            OutputWorkspace='__bkg_tmp',
                            EnableLogging=False)
            CopyInstrumentParameters(data, '__bkg_tmp', EnableLogging=False)
            ConvertSpectrumAxis(InputWorkspace='__bkg_tmp',
                                Target=target,
                                EFixed=eFixed,
                                OutputWorkspace='__bkg_tmp',
                                EnableLogging=False)
            Transpose(InputWorkspace='__bkg_tmp',
                      OutputWorkspace='__bkg_tmp',
                      EnableLogging=False)
            ResampleX(InputWorkspace='__bkg_tmp',
                      OutputWorkspace='__bkg_tmp',
                      XMin=xMin,
                      XMax=xMax,
                      NumberBins=numberBins,
                      EnableLogging=False)
            if cal is not None:
                Divide(LHSWorkspace='__bkg_tmp',
                       RHSWorkspace='__cal_tmp',
                       OutputWorkspace='__bkg_tmp',
                       EnableLogging=False)
            if normaliseBy == "Monitor":
                bkg_scale = bkg.run().getProtonCharge()
            elif normaliseBy == "Time":
                bkg_scale = bkg.run().getLogData('duration').value
            Scale(InputWorkspace='__bkg_tmp',
                  OutputWorkspace='__bkg_tmp',
                  Factor=cal_scale / bkg_scale,
                  EnableLogging=False)
            Scale(InputWorkspace='__bkg_tmp',
                  OutputWorkspace='__bkg_tmp',
                  Factor=self.getProperty('BackgroundScale').value,
                  EnableLogging=False)
            Minus(LHSWorkspace=outWS,
                  RHSWorkspace='__bkg_tmp',
                  OutputWorkspace=outWS,
                  EnableLogging=False)

        self.setProperty("OutputWorkspace", outWS)

        # remove temp workspaces
        for ws in self.temp_workspace_list:
            if mtd.doesExist(ws):
                DeleteWorkspace(ws, EnableLogging=False)
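
A minimal sketch of the mask handling above (ExtractMask followed by ExtractUnmaskedSpectra), run on a synthetic workspace and assuming Mantid. The workspace names and the two masked indices are illustrative.

from mantid.simpleapi import (CreateSampleWorkspace, MaskDetectors, ExtractMask,
                              ExtractUnmaskedSpectra, mtd)

CreateSampleWorkspace(OutputWorkspace='data', NumBanks=1, BankPixelWidth=4)  # 16 spectra
MaskDetectors(Workspace='data', WorkspaceIndexList=[0, 1])  # mask two pixels

# pull the mask out of the data workspace, then keep only the unmasked spectra
ExtractMask(InputWorkspace='data', OutputWorkspace='__mask_tmp')
ExtractUnmaskedSpectra(InputWorkspace='data', MaskWorkspace='__mask_tmp',
                       OutputWorkspace='data_unmasked')
print(mtd['data_unmasked'].getNumberHistograms())  # -> 14
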
Example #18
0
    def PyExec(self):
        data = self._expand_groups()
        bkg = self.getProperty(
            "BackgroundWorkspace").valueAsStr  # same background for all
        cal = self.getProperty(
            "CalibrationWorkspace").value  # same calibration for all
        numberBins = self.getProperty("NumberBins").value
        outWS = self.getPropertyValue("OutputWorkspace")
        summing = self.getProperty("Sum").value  # [Yes or No]

        # convert all of the input workspaces to the "target" spectrum-axis units (generally an angle)
        data, masks = self._convert_data(data)

        # determine x-range
        xMin, xMax = self._locate_global_xlimit(data)

        # BEGIN_FOR: process_spectra
        for n, (_wsn, _mskn) in enumerate(zip(data, masks)):
            # resample spectra
            ResampleX(
                InputWorkspace=_wsn,
                OutputWorkspace=_wsn,
                XMin=xMin,
                XMax=xMax,
                NumberBins=numberBins,
                EnableLogging=False,
            )

            # calibration
            if cal is not None:
                _ws_cal_resampled = self._resample_calibration(
                    _wsn, _mskn, xMin, xMax)
                Divide(
                    LHSWorkspace=_wsn,
                    RHSWorkspace=_ws_cal_resampled,
                    OutputWorkspace=_wsn,
                    EnableLogging=False,
                )
            else:
                _ws_cal_resampled = None

            Scale(
                InputWorkspace=_wsn,
                OutputWorkspace=_wsn,
                Factor=self._get_scale(cal) / self._get_scale(_wsn),
                EnableLogging=False,
            )

            # background
            if bkg:
                _ws_bkg_resampled = self._resample_background(
                    bkg, _wsn, _mskn, xMin, xMax, _ws_cal_resampled)

                Minus(
                    LHSWorkspace=_wsn,
                    RHSWorkspace=_ws_bkg_resampled,
                    OutputWorkspace=_wsn,
                    EnableLogging=False,
                )

            if summing:
                # conjoin
                if n < 1:
                    RenameWorkspace(
                        InputWorkspace=_wsn,
                        OutputWorkspace="__ws_conjoined",
                        EnableLogging=False,
                    )
                else:
                    # this adds to `InputWorkspace1`
                    ConjoinWorkspaces(
                        InputWorkspace1="__ws_conjoined",
                        InputWorkspace2=_wsn,
                        CheckOverlapping=False,
                        EnableLogging=False,
                    )

        # END_FOR: process_spectra
        # Step_3: sum all spectra
        # ref: https://docs.mantidproject.org/nightly/algorithms/SumSpectra-v1.html
        if summing:
            if cal is not None:
                outWS = SumSpectra(
                    InputWorkspace="__ws_conjoined",
                    OutputWorkspace=outWS,
                    WeightedSum=True,
                    MultiplyBySpectra=not bool(cal),
                    EnableLogging=False,
                )
            else:
                outWS = SumSpectra(
                    InputWorkspace="__ws_conjoined",
                    OutputWorkspace=outWS,
                    WeightedSum=True,
                    MultiplyBySpectra=True,
                    EnableLogging=False,
                )
        else:
            if len(data) == 1:
                outWS = RenameWorkspace(InputWorkspace=data[0],
                                        OutputWorkspace=outWS)
            else:
                outWS = GroupWorkspaces(InputWorkspaces=data,
                                        OutputWorkspace=outWS)

        self.setProperty("OutputWorkspace", outWS)

        # Step_4: remove temp workspaces
        for ws in self.temp_workspace_list:
            if mtd.doesExist(ws):
                DeleteWorkspace(ws, EnableLogging=False)
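
A minimal sketch of the conjoin-then-sum step above, using two synthetic single-spectrum workspaces and assuming Mantid; the workspace names are illustrative. ConjoinWorkspaces appends InputWorkspace2 onto InputWorkspace1 in place and removes InputWorkspace2 from the analysis data service.

from mantid.simpleapi import (CreateSampleWorkspace, RenameWorkspace,
                              ConjoinWorkspaces, SumSpectra)

CreateSampleWorkspace(OutputWorkspace='ws0', NumBanks=1, BankPixelWidth=1)
CreateSampleWorkspace(OutputWorkspace='ws1', NumBanks=1, BankPixelWidth=1)

RenameWorkspace(InputWorkspace='ws0', OutputWorkspace='__ws_conjoined')
ConjoinWorkspaces(InputWorkspace1='__ws_conjoined', InputWorkspace2='ws1',
                  CheckOverlapping=False)
SumSpectra(InputWorkspace='__ws_conjoined', OutputWorkspace='summed', WeightedSum=True)
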
Example #19
0
    def PyExec(self):
        _background = bool(self.getProperty("Background").value)
        _load_inst = bool(self.getProperty("LoadInstrument").value)
        _norm_current = bool(self.getProperty("NormaliseByCurrent").value)
        _detcal = bool(self.getProperty("DetCal").value)
        _masking = bool(self.getProperty("MaskFile").value)
        _grouping = bool(self.getProperty("GroupingFile").value)
        _anvred = bool(self.getProperty("SphericalAbsorptionCorrection").value)
        _SA_name = self.getPropertyValue("SolidAngleOutputWorkspace")
        _Flux_name = self.getPropertyValue("FluxOutputWorkspace")

        XMin = self.getProperty("MomentumMin").value
        XMax = self.getProperty("MomentumMax").value
        rebin_param = ','.join([str(XMin), str(XMax), str(XMax)])

        Load(Filename=self.getPropertyValue("Filename"),
             OutputWorkspace='__van',
             FilterByTofMin=self.getProperty("FilterByTofMin").value,
             FilterByTofMax=self.getProperty("FilterByTofMax").value)

        if _norm_current:
            NormaliseByCurrent(InputWorkspace='__van', OutputWorkspace='__van')

        if _background:
            Load(Filename=self.getProperty("Background").value,
                 OutputWorkspace='__bkg',
                 FilterByTofMin=self.getProperty("FilterByTofMin").value,
                 FilterByTofMax=self.getProperty("FilterByTofMax").value)
            if _norm_current:
                NormaliseByCurrent(InputWorkspace='__bkg',
                                   OutputWorkspace='__bkg')
            else:
                pc_van = mtd['__van'].run().getProtonCharge()
                pc_bkg = mtd['__bkg'].run().getProtonCharge()
                mtd['__bkg'] *= pc_van / pc_bkg
            mtd['__bkg'] *= self.getProperty('BackgroundScale').value
            Minus(LHSWorkspace='__van',
                  RHSWorkspace='__bkg',
                  OutputWorkspace='__van')
            DeleteWorkspace('__bkg')

        if _load_inst:
            LoadInstrument(Workspace='__van',
                           Filename=self.getProperty("LoadInstrument").value,
                           RewriteSpectraMap=False)
        if _detcal:
            LoadIsawDetCal(InputWorkspace='__van',
                           Filename=self.getProperty("DetCal").value)

        if _masking:
            LoadMask(Instrument=mtd['__van'].getInstrument().getName(),
                     InputFile=self.getProperty("MaskFile").value,
                     OutputWorkspace='__mask')
            MaskDetectors(Workspace='__van', MaskedWorkspace='__mask')
            DeleteWorkspace('__mask')

        ConvertUnits(InputWorkspace='__van',
                     OutputWorkspace='__van',
                     Target='Momentum')
        Rebin(InputWorkspace='__van',
              OutputWorkspace='__van',
              Params=rebin_param)
        CropWorkspace(InputWorkspace='__van',
                      OutputWorkspace='__van',
                      XMin=XMin,
                      XMax=XMax)

        if _anvred:
            AnvredCorrection(InputWorkspace='__van',
                             OutputWorkspace='__van',
                             LinearScatteringCoef=self.getProperty(
                                 "LinearScatteringCoef").value,
                             LinearAbsorptionCoef=self.getProperty(
                                 "LinearAbsorptionCoef").value,
                             Radius=self.getProperty("Radius").value,
                             OnlySphericalAbsorption='1',
                             PowerLambda='0')

        # Create solid angle
        Rebin(InputWorkspace='__van',
              OutputWorkspace=_SA_name,
              Params=rebin_param,
              PreserveEvents=False)

        # Create flux
        if _grouping:
            GroupDetectors(InputWorkspace='__van',
                           OutputWorkspace='__van',
                           MapFile=self.getProperty("GroupingFile").value)
        else:
            SumSpectra(InputWorkspace='__van', OutputWorkspace='__van')

        Rebin(InputWorkspace='__van',
              OutputWorkspace='__van',
              Params=rebin_param)
        # normalise each spectrum by its single integrated bin so each spectrum's total flux is one
        flux = mtd['__van']
        for i in range(flux.getNumberHistograms()):
            el = flux.getSpectrum(i)
            if flux.readY(i)[0] > 0:
                el.divide(flux.readY(i)[0], flux.readE(i)[0])
        SortEvents(InputWorkspace='__van', SortBy="X Value")
        IntegrateFlux(InputWorkspace='__van',
                      OutputWorkspace=_Flux_name,
                      NPoints=10000)
        DeleteWorkspace('__van')

        self.setProperty("SolidAngleOutputWorkspace", mtd[_SA_name])
        self.setProperty("FluxOutputWorkspace", mtd[_Flux_name])