示例#1
0
def sumToShim(rnum, output_dir=None):
    """
    Combine both spin states into a single workspace.

    Parameters
    ----------
    rnum : int
      The run number to be shimmed
    output_dir : str, optional
      If given, the folder where the workspace should be saved

    """
    try:
        # The left-hand-side name matters: Mantid's simpleapi derives the
        # output workspace names (wtemp_1, wtemp_2, wtemp_monitors_1, ...)
        # from the assigned variable name.
        wtemp = Load(BASE.format(rnum), LoadMonitors=True)
        # Rebin both spin states onto the monitor binning so each can be
        # conjoined with its monitor spectra below.
        RebinToWorkspace('wtemp_1',
                         'wtemp_monitors_1',
                         PreserveEvents=False,
                         OutputWorkspace='wtemp_1')
        RebinToWorkspace('wtemp_2',
                         'wtemp_monitors_1',
                         PreserveEvents=False,
                         OutputWorkspace='wtemp_2')
        wtemp_1 = ConjoinWorkspaces('wtemp_monitors_1', 'wtemp_1')
        wtemp_2 = ConjoinWorkspaces('wtemp_monitors_2', 'wtemp_2')
    except Exception:
        # Was a bare ``except:`` which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.  Fall back to loading
        # the run without separate monitor workspaces.
        wtemp_monitors = Load(BASE.format(rnum))
    # Sum the two spin states into the shimmed workspace.
    wtempShim = mtd['wtemp_monitors_1'] + mtd['wtemp_monitors_2']
    RenameWorkspace(wtempShim, 'LARMOR{:08d}'.format(rnum))
    if output_dir:
        SaveNexusProcessed(
            'LARMOR{:08d}'.format(rnum),
            os.path.join(output_dir, "LARMOR{:08d}-add.nxs".format(rnum)))
    RenameWorkspace('LARMOR{:08d}'.format(rnum),
                    'LARMOR{:08d}-add'.format(rnum))
示例#2
0
def rebin_to_smallest(*workspaces):
    """
    Rebins the given workspaces to match the one with the smallest
    x-range.

    :param workspaces: The list of workspaces to rebin to the smallest.
    :return:           The rebinned list of workspaces.
    """
    if len(workspaces) == 1:
        return workspaces

    reference_idx, reference_ws = min(enumerate(workspaces),
                                      key=lambda pair: pair[1].blocksize())

    result = []
    for position, candidate in enumerate(workspaces):

        # The reference workspace needs no rebinning to match itself.
        # NOTE: In the future this may append workspace.clone() - this will
        # occur in the circumstance that the input files do not want to be
        # removed from the ADS.
        if position == reference_idx:
            result.append(candidate)
            continue

        result.append(RebinToWorkspace(WorkspaceToRebin=candidate,
                                       WorkspaceToMatch=reference_ws,
                                       OutputWorkspace="rebinned",
                                       StoreInADS=False,
                                       EnableLogging=False))

    return result
 def runTest(self):
     """Run the WISH bank-1 reduction: normalise the raw data to vanadium,
     convert to a diffraction MD workspace and bin a 2D cut whose histogram
     can be compared against a saved reference workspace.
     """
     # Load processed vanadium for normalisation (bank 1)
     van = LoadNexus(Filename="WISH19612_vana_bank1_SXProcessed.nxs")
     # Load raw data (bank 1)
     ws = load_data_and_normalise(
         "WISH00038237.raw")  # default OutputWorkspace name, so 'van' is not overwritten
     # normalise to vanadium: first match the vanadium binning to the data
     RebinToWorkspace(WorkspaceToRebin=van,
                      WorkspaceToMatch=ws,
                      OutputWorkspace=van)
     Divide(LHSWorkspace=ws, RHSWorkspace=van, OutputWorkspace=ws)
     # clean up NaN/inf and huge values produced by dividing by
     # (near-)zero vanadium bins
     ReplaceSpecialValues(InputWorkspace=ws,
                          OutputWorkspace=ws,
                          NaNValue=0,
                          InfinityValue=0,
                          BigNumberThreshold=1e15,
                          SmallNumberThreshold=-1e15)
     # Convert to Diffraction MD and Lorentz Correction
     wsMD = ConvertToDiffractionMDWorkspace(InputWorkspace=ws,
                                            LorentzCorrection=True,
                                            OneEventPerBin=False)
     # BinMD to 2D object and convert to histo so can compare saved workspace
     wsMD_2Dcut = BinMD(InputWorkspace=wsMD,
                        AxisAligned=False,
                        BasisVector0='Q_lab_x,Angstrom^-1,1.0,0.0,0.0',
                        BasisVector1='Q_lab_y,Angstrom^-1,0.0,1.0,0.0',
                        BasisVector2='Q_lab_z,Angstrom^-1,0.0,0.0,1.0',
                        OutputExtents='0.2,0.8,-0.4,0.4,0.05,0.1',
                        OutputBins='50,50,1')
     # NOTE(review): the kwarg below is lower-case 'outputWorkspace';
     # confirm Mantid matches it to the OutputWorkspace property.
     ConvertMDHistoToMatrixWorkspace(InputWorkspace=wsMD_2Dcut,
                                     outputWorkspace="wsHisto_2Dcut")
    def __accumulate(self, chunkname, sumname, chunkunfocusname, sumuunfocusname, firstrun, removelogs=False):
        """Accumulate the new chunk ``chunkname`` into the running sum
        ``sumname`` (and likewise the unfocussed chunk into its sum), then
        delete the chunk workspace.

        :param chunkname: name of the newly processed chunk workspace
        :param sumname: name of the accumulation target workspace
        :param chunkunfocusname: name of the unfocussed chunk ('' / falsy to skip)
        :param sumuunfocusname: name of the unfocussed accumulation target
        :param firstrun: True when this is the first chunk for this target;
            the accumulation is then a simple rename
        :param removelogs: strip logs from the chunk before adding, because
            the accumulated workspace already carries them
        """
        # the first call to accumulate to a specific target should be a simple rename
        self.log().debug('__accumulate({}, {}, {}, {}, {})'.format(chunkname, sumname, chunkunfocusname,
                                                                   sumuunfocusname, firstrun))
        if chunkname == sumname:
            return  # there is nothing to be done

        if not firstrun:
            # if the sum workspace doesn't already exist, just rename
            if not mtd.doesExist(sumname):
                firstrun = True

        if firstrun:
            if chunkname != sumname:
                RenameWorkspace(InputWorkspace=chunkname, OutputWorkspace=sumname)
            if chunkunfocusname and chunkunfocusname != sumuunfocusname:
                RenameWorkspace(InputWorkspace=chunkunfocusname, OutputWorkspace=sumuunfocusname)
        else:
            if removelogs:
                RemoveLogs(Workspace=chunkname)  # accumulation has them already
            # match the chunk binning to the accumulated workspace so that
            # Plus operates on aligned bins
            RebinToWorkspace(WorkspaceToRebin=chunkname, WorkspaceToMatch=sumname,
                             OutputWorkspace=chunkname)
            Plus(LHSWorkspace=sumname, RHSWorkspace=chunkname, OutputWorkspace=sumname,
                 ClearRHSWorkspace=self.kwargs['PreserveEvents'])
            DeleteWorkspace(Workspace=chunkname)
            self.__compressEvents(sumname)  # could be smarter about when to run

            # the unfocussed data is already on a common binning, so no
            # rebin step is needed before adding it
            if chunkunfocusname and chunkunfocusname != sumuunfocusname:
                if removelogs:
                    RemoveLogs(Workspace=chunkunfocusname)  # accumulation has them already
                Plus(LHSWorkspace=sumuunfocusname, RHSWorkspace=chunkunfocusname, OutputWorkspace=sumuunfocusname,
                     ClearRHSWorkspace=self.kwargs['PreserveEvents'])
                DeleteWorkspace(Workspace=chunkunfocusname)
                self.__compressEvents(sumuunfocusname)  # could be smarter about when to run
def rebin_reduction(workspace_name, rebin_string, multi_frame_rebin_string, num_bins):
    """
    Rebin a reduced workspace, in place, according to the given parameters.

    @param workspace_name Name of workspace to rebin
    @param rebin_string Rebin parameters
    @param multi_frame_rebin_string Rebin string for multiple frame rebinning
    @param num_bins Max number of bins in input frames
    """
    from mantid.simpleapi import (Rebin, RebinToWorkspace, SortXAxis)

    if rebin_string is None:
        try:
            # If user does not want to rebin then just ensure uniform binning across spectra
            RebinToWorkspace(WorkspaceToRebin=workspace_name,
                             WorkspaceToMatch=workspace_name,
                             OutputWorkspace=workspace_name)
        except RuntimeError:
            logger.warning('Rebinning failed, will try to continue anyway.')
        return

    if multi_frame_rebin_string is not None and num_bins is not None:
        # Multi frame data: pick the parameter set depending on whether this
        # workspace still carries the full number of bins.
        if mtd[workspace_name].blocksize() == num_bins:
            params = rebin_string
        else:
            params = multi_frame_rebin_string
        Rebin(InputWorkspace=workspace_name,
              OutputWorkspace=workspace_name,
              Params=params)
    else:
        # Regular data
        SortXAxis(InputWorkspace=workspace_name,
                  OutputWorkspace=workspace_name)
        Rebin(InputWorkspace=workspace_name,
              OutputWorkspace=workspace_name,
              Params=rebin_string)
 def _waterCalibration(self, ws):
     """Divide ws by a (water) reference workspace.

     Returns ws unchanged when no water reference was given; otherwise
     returns the calibrated workspace and cleans up the intermediates.
     """
     if self.getProperty(Prop.WATER_REFERENCE).isDefault:
         return ws
     waterWS = self.getProperty(Prop.WATER_REFERENCE).value
     detWSName = self._names.withSuffix('water_detectors')
     waterWS = ExtractMonitors(InputWorkspace=waterWS,
                               DetectorWorkspace=detWSName,
                               EnableLogging=self._subalgLogging)
     # BUG FIX: mtd.doesExist() returns a bool, never None, so the previous
     # ``is None`` comparison could never trigger; test falsiness instead.
     if not mtd.doesExist(detWSName):
         raise RuntimeError('No detectors in the water reference data.')
     if waterWS.getNumberHistograms() != ws.getNumberHistograms():
         self.log().error(
             'Water workspace and run do not have the same number of histograms.'
         )
     # Match the water binning to the data before dividing.
     rebinnedWaterWSName = self._names.withSuffix('water_rebinned')
     rebinnedWaterWS = RebinToWorkspace(WorkspaceToRebin=waterWS,
                                        WorkspaceToMatch=ws,
                                        OutputWorkspace=rebinnedWaterWSName,
                                        EnableLogging=self._subalgLogging)
     calibratedWSName = self._names.withSuffix('water_calibrated')
     calibratedWS = Divide(LHSWorkspace=ws,
                           RHSWorkspace=rebinnedWaterWS,
                           OutputWorkspace=calibratedWSName,
                           EnableLogging=self._subalgLogging)
     self._cleanup.cleanup(waterWS)
     self._cleanup.cleanup(rebinnedWaterWS)
     self._cleanup.cleanup(ws)
     return calibratedWS
示例#7
0
 def _commonBinning(self, wss):
     """Rebin every workspace in wss, in place, to match the first one."""
     reference = wss[0]
     for target in wss[1:]:
         RebinToWorkspace(WorkspaceToRebin=target,
                          OutputWorkspace=target,
                          WorkspaceToMatch=reference,
                          EnableLogging=self._subalgLogging)
     return wss
 def _rebinToDirect(self, ws):
     """Rebin ws to the direct foreground binning and clean up ws."""
     direct = self.getProperty(Prop.DIRECT_FOREGROUND_WS).value
     outName = self._names.withSuffix('rebinned')
     rebinned = RebinToWorkspace(WorkspaceToRebin=ws,
                                 WorkspaceToMatch=direct,
                                 OutputWorkspace=outName,
                                 EnableLogging=self._subalgLogging)
     self._cleanup.cleanup(ws)
     return rebinned
 def _commonBinning(self, wss):
     """Rebin all workspaces in wss to the first one, cleaning up originals."""
     reference = wss[0]
     rebinned = [reference]
     for ws in wss[1:]:
         outName = self._names.withSuffix('rebinned_to_' + str(reference))
         RebinToWorkspace(WorkspaceToRebin=ws,
                          OutputWorkspace=outName,
                          WorkspaceToMatch=reference,
                          EnableLogging=self._subalgLogging)
         self._cleanup.cleanup(ws)
         rebinned.append(outName)
     return rebinned
示例#10
0
def rebin_and_subtract(minuend_workspace, subtrahend_workspace):
    """
    Rebins the subtrahend workspace to match the minuend workspace and
    then subtracts the subtrahend workspace from the minuend workspace.

    :param minuend_workspace:       The workspace to subtract from.
    :param subtrahend_workspace:    The workspace to be subtracted.
    :return:                        The minuend workspace - the subtrahend workspace.
    """
    rebinned_subtrahend = RebinToWorkspace(WorkspaceToRebin=subtrahend_workspace,
                                           WorkspaceToMatch=minuend_workspace,
                                           OutputWorkspace="rebinned_container",
                                           StoreInADS=False, EnableLogging=False)
    return minuend_workspace - rebinned_subtrahend
示例#11
0
    def _subtract(self, minuend_workspace, subtrahend_workspace):
        """
        Do a simple container subtraction (when no corrections are given).

        :param minuend_workspace:    The sample workspace to subtract from.
        :param subtrahend_workspace: The container workspace to subtract.
        :return:                     minuend - (optionally rebinned) subtrahend.
        """

        logger.information('Using simple container subtraction')

        if self._rebin_container_ws:
            # Fixed typo in the log message ('Rebining' -> 'Rebinning').
            logger.information('Rebinning container to ensure Minus')
            # Match the container binning to the sample so the subtraction
            # operates on aligned bins.
            subtrahend_workspace = RebinToWorkspace(
                WorkspaceToRebin=subtrahend_workspace,
                WorkspaceToMatch=minuend_workspace,
                OutputWorkspace="__rebinned",
                StoreInADS=False)
        return minuend_workspace - subtrahend_workspace
def _subtractEC(ws, ecWS, ecScaling, wsNames, wsCleanup, algorithmLogging):
    """Subtract empty container.

    Computes out = ws - ecScaling * ecWS, rebinning the scaled container
    to the binning of ws before the subtraction.
    """
    scaledECWSName = wsNames.withSuffix('scaled_EC')
    scaledECWS = Scale(InputWorkspace=ecWS,
                       Factor=ecScaling,
                       OutputWorkspace=scaledECWSName,
                       EnableLogging=algorithmLogging)
    rebinnedECWSName = wsNames.withSuffix('rebinned_EC')
    # EnableLogging is now forwarded here too, for consistency with the
    # other child algorithm calls in this function.
    rebinnedECWS = RebinToWorkspace(WorkspaceToRebin=scaledECWS,
                                    WorkspaceToMatch=ws,
                                    OutputWorkspace=rebinnedECWSName,
                                    EnableLogging=algorithmLogging)
    ecSubtractedWSName = wsNames.withSuffix('EC_subtracted')
    ecSubtractedWS = Minus(LHSWorkspace=ws,
                           RHSWorkspace=rebinnedECWS,
                           OutputWorkspace=ecSubtractedWSName,
                           EnableLogging=algorithmLogging)
    wsCleanup.cleanup(scaledECWS)
    # The rebinned intermediate was previously never cleaned up; release it.
    wsCleanup.cleanup(rebinnedECWS)
    return ecSubtractedWS
 def _applyCorrections(self, mainWS):
     """Apply self-shielding corrections to mainWS if a correction workspace exists.

     Returns a (workspace, bool) pair where the bool tells whether any
     correction was applied.
     """
     if self.getProperty(common.PROP_SELF_SHIELDING_CORRECTION_WS).isDefault:
         return mainWS, False
     correctionWS = self.getProperty(common.PROP_SELF_SHIELDING_CORRECTION_WS).value
     matchedCorrectionWS = '{}_matched'.format(correctionWS)
     # Match the correction binning to the data before dividing.
     RebinToWorkspace(WorkspaceToRebin=correctionWS,
                      WorkspaceToMatch=mainWS,
                      OutputWorkspace=matchedCorrectionWS)
     correctedWSName = self._names.withSuffix('self_shielding_corrected')
     correctedWS = Divide(LHSWorkspace=mainWS,
                          RHSWorkspace=matchedCorrectionWS,
                          OutputWorkspace=correctedWSName,
                          EnableLogging=self._subalgLogging)
     self._cleanup.cleanup(matchedCorrectionWS)
     self._cleanup.cleanup(mainWS)
     return correctedWS, True
示例#14
0
 def _waterCalibration(self, ws):
     """Divide ws by a (water) reference workspace."""
     if self.getProperty(Prop.WATER_REFERENCE).isDefault:
         return ws
     waterWS = self.getProperty(Prop.WATER_REFERENCE).value
     # input validation for InputWorkspace compatibility, but runs?
     if waterWS.getNumberHistograms() != ws.getNumberHistograms():
         self.log().error('Water workspace and run do not have the same number of histograms.')
     # Match the water binning to the data, divide, then drop intermediates.
     rebinnedName = self._names.withSuffix('water_rebinned')
     rebinnedWater = RebinToWorkspace(WorkspaceToRebin=waterWS,
                                      WorkspaceToMatch=ws,
                                      OutputWorkspace=rebinnedName,
                                      EnableLogging=self._subalgLogging)
     calibratedName = self._names.withSuffix('water_calibrated')
     calibrated = Divide(LHSWorkspace=ws,
                         RHSWorkspace=rebinnedWater,
                         OutputWorkspace=calibratedName,
                         EnableLogging=self._subalgLogging)
     self._cleanup.cleanup(rebinnedWater)
     self._cleanup.cleanup(ws)
     return calibrated
示例#15
0
 def _sumForegroundInLambda(self, ws):
     """Sum the foreground region into a single histogram.

     The spectrum at the beam centre is extracted first and the remaining
     foreground spectra are added onto it; errors are accumulated as
     variances and converted back to standard deviations at the end.
     """
     # foreground holds (first index, beam centre index, last index).
     foreground = self._foregroundIndices(ws)
     sumIndices = [i for i in range(foreground[0], foreground[2] + 1)]
     beamPosIndex = foreground[1]
     foregroundWSName = self._names.withSuffix('foreground_grouped')
     # Start from the beam-centre spectrum; the others are added in the loop.
     foregroundWS = ExtractSingleSpectrum(
         InputWorkspace=ws,
         OutputWorkspace=foregroundWSName,
         WorkspaceIndex=beamPosIndex,
         EnableLogging=self._subalgLogging)
     maxIndex = ws.getNumberHistograms() - 1
     foregroundYs = foregroundWS.dataY(0)
     foregroundEs = foregroundWS.dataE(0)
     # Work in variance space while accumulating.
     numpy.square(foregroundEs, out=foregroundEs)
     for i in sumIndices:
         if i == beamPosIndex:
             continue
         if i < 0 or i > maxIndex:
             # NOTE(review): this only warns; the out-of-range index is still
             # passed to ExtractSingleSpectrum below - confirm this is intended.
             self.log().warning('Foreground partially out of the workspace.')
         addeeWSName = self._names.withSuffix('foreground_addee')
         addeeWS = ExtractSingleSpectrum(
             InputWorkspace=ws,
             OutputWorkspace=addeeWSName,
             WorkspaceIndex=i,
             EnableLogging=self._subalgLogging)
         # Match the binning to the beam-centre spectrum so the Y/E arrays align.
         addeeWS = RebinToWorkspace(
             WorkspaceToRebin=addeeWS,
             WorkspaceToMatch=foregroundWS,
             OutputWorkspace=addeeWSName,
             EnableLogging=self._subalgLogging)
         ys = addeeWS.readY(0)
         foregroundYs += ys
         es = addeeWS.readE(0)
         foregroundEs += es**2
         self._cleanup.cleanup(addeeWS)
     self._cleanup.cleanup(ws)
     # Convert the accumulated variances back to standard deviations.
     numpy.sqrt(foregroundEs, out=foregroundEs)
     return foregroundWS
示例#16
0
    def _correct_sample_can(self, sample_workspace, container_workspace,
                            factor_workspaces):
        """
        Correct for sample and container.
        """

        logger.information('Correcting sample and container')

        # Convert every correction factor workspace to wavelength.
        factor_workspaces_wavelength = {}
        for factor, workspace in factor_workspaces.items():
            factor_workspaces_wavelength[factor] = \
                self._convert_units_wavelength(workspace)

        if self._rebin_container_ws:
            container_workspace = RebinToWorkspace(
                WorkspaceToRebin=container_workspace,
                WorkspaceToMatch=factor_workspaces_wavelength['acc'],
                OutputWorkspace="rebinned",
                StoreInADS=False)

        return self._corrections_approximation(sample_workspace,
                                               container_workspace,
                                               factor_workspaces_wavelength)
示例#17
0
 def _run_focus(input_workspace, tof_output_name, curves, grouping_ws,
                region_calib) -> None:
     """Focus the processed full instrument workspace over the chosen region of interest
     :param input_workspace: Processed full instrument workspace converted to dSpacing
     :param tof_output_name: Name for the time-of-flight output workspace
     :param curves: Workspace containing the vanadium curves for this region of interest
     :param grouping_ws: Grouping workspace to pass to DiffractionFocussing
     :param region_calib: Region of interest calibration workspace (table ws output from PDCalibration)
     """
     # name the dSpacing workspace up front to avoid errors later
     dspacing_output_name = tof_output_name + "_dSpacing"
     # focus sample over specified region of interest
     focused_sample = DiffractionFocussing(
         InputWorkspace=input_workspace,
         OutputWorkspace=dspacing_output_name,
         GroupingWorkspace=grouping_ws)
     # flux correction - divide focused sample data by rebinned focused vanadium curve data
     curves_rebinned = RebinToWorkspace(WorkspaceToRebin=curves,
                                        WorkspaceToMatch=focused_sample)
     Divide(LHSWorkspace=focused_sample,
            RHSWorkspace=curves_rebinned,
            OutputWorkspace=focused_sample,
            AllowDifferentNumberSpectra=True)
     # apply calibration from specified region of interest
     ApplyDiffCal(InstrumentWorkspace=focused_sample,
                  CalibrationWorkspace=region_calib)
     # set bankid for use in fit tab (any other calibration maps to bank 3)
     bank_id_by_calib = {"engggui_calibration_bank_1": 1,
                         "engggui_calibration_bank_2": 2}
     run = focused_sample.getRun()
     run.addProperty("bankid",
                     bank_id_by_calib.get(region_calib.name(), 3),
                     True)
     # output in both dSpacing and TOF
     ConvertUnits(InputWorkspace=focused_sample,
                  OutputWorkspace=tof_output_name,
                  Target='TOF')
     DeleteWorkspace(curves_rebinned)
示例#18
0
 def _divideByDirect(self, ws):
     """Divide ws by the direct beam, producing a reflectivity workspace."""
     if self.getProperty(Prop.DIRECT_FOREGROUND_WS).isDefault:
         return ws
     directWS = self.getProperty(Prop.DIRECT_FOREGROUND_WS).value
     # Match the binning to the direct beam before dividing.
     rebinnedName = self._names.withSuffix('rebinned')
     rebinned = RebinToWorkspace(WorkspaceToRebin=ws,
                                 WorkspaceToMatch=directWS,
                                 OutputWorkspace=rebinnedName,
                                 EnableLogging=self._subalgLogging)
     self._cleanup.cleanup(ws)
     reflectivityName = self._names.withSuffix('reflectivity')
     reflectivity = Divide(LHSWorkspace=rebinned,
                           RHSWorkspace=directWS,
                           OutputWorkspace=reflectivityName,
                           EnableLogging=self._subalgLogging)
     self._cleanup.cleanup(rebinned)
     reflectivity = self._correctForChopperOpenings(reflectivity, directWS)
     reflectivity.setYUnit('Reflectivity')
     reflectivity.setYUnitLabel('Reflectivity')
     return reflectivity
示例#19
0
def scale_detectors(workspace_name, e_mode='Indirect'):
    """
    Scales detectors by monitor intensity.

    @param workspace_name Name of detector workspace
    @param e_mode Energy mode (Indirect for spectroscopy, Elastic for diffraction)
    """
    from mantid.simpleapi import (ConvertUnits, RebinToWorkspace, Divide)

    monitor_name = workspace_name + '_mon'

    # Put the detectors on a wavelength axis, rebin them to the monitor
    # binning, then divide by the monitor spectrum.
    ConvertUnits(InputWorkspace=workspace_name,
                 OutputWorkspace=workspace_name,
                 Target='Wavelength',
                 EMode=e_mode)
    RebinToWorkspace(WorkspaceToRebin=workspace_name,
                     WorkspaceToMatch=monitor_name,
                     OutputWorkspace=workspace_name)
    Divide(LHSWorkspace=workspace_name,
           RHSWorkspace=monitor_name,
           OutputWorkspace=workspace_name)
def TotalScatteringReduction(config=None):
    facility = config['Facility']
    title = config['Title']
    instr = config['Instrument']

    # Get an instance to Mantid's logger
    log = Logger("TotalScatteringReduction")

    # Get sample info
    sample = get_sample(config)
    sam_mass_density = sample.get('MassDensity', None)
    sam_packing_fraction = sample.get('PackingFraction', None)
    sam_geometry = sample.get('Geometry', None)
    sam_material = sample.get('Material', None)

    sam_geo_dict = {
        'Shape': 'Cylinder',
        'Radius': config['Sample']['Geometry']['Radius'],
        'Height': config['Sample']['Geometry']['Height']
    }
    sam_mat_dict = {
        'ChemicalFormula': sam_material,
        'SampleMassDensity': sam_mass_density
    }
    if 'Environment' in config:
        sam_env_dict = {
            'Name': config['Environment']['Name'],
            'Container': config['Environment']['Container']
        }
    else:
        sam_env_dict = {'Name': 'InAir', 'Container': 'PAC06'}
    # Get normalization info
    van = get_normalization(config)
    van_mass_density = van.get('MassDensity', None)
    van_packing_fraction = van.get('PackingFraction', 1.0)
    van_geometry = van.get('Geometry', None)
    van_material = van.get('Material', 'V')

    van_geo_dict = {
        'Shape': 'Cylinder',
        'Radius': config['Normalization']['Geometry']['Radius'],
        'Height': config['Normalization']['Geometry']['Height']
    }
    van_mat_dict = {
        'ChemicalFormula': van_material,
        'SampleMassDensity': van_mass_density
    }

    # Get calibration, characterization, and other settings
    merging = config['Merging']
    binning = merging['QBinning']
    characterizations = merging.get('Characterizations', None)

    # Grouping
    grouping = merging.get('Grouping', None)
    cache_dir = config.get("CacheDir", os.path.abspath('.'))
    OutputDir = config.get("OutputDir", os.path.abspath('.'))

    # Create Nexus file basenames
    sample['Runs'] = expand_ints(sample['Runs'])
    sample['Background']['Runs'] = expand_ints(sample['Background'].get(
        'Runs', None))
    '''
    Currently not implemented:
    # wkspIndices = merging.get('SumBanks', None)
    # high_q_linear_fit_range = config['HighQLinearFitRange']

    POWGEN options not used
    #alignAndFocusArgs['RemovePromptPulseWidth'] = 50
    # alignAndFocusArgs['CompressTolerance'] use defaults
    # alignAndFocusArgs['UnwrapRef'] POWGEN option
    # alignAndFocusArgs['LowResRef'] POWGEN option
    # alignAndFocusArgs['LowResSpectrumOffset'] POWGEN option

    How much of each bank gets merged has info here in the form of
    # {"ID", "Qmin", "QMax"}
    # alignAndFocusArgs['CropWavelengthMin'] from characterizations file
    # alignAndFocusArgs['CropWavelengthMax'] from characterizations file
    '''

    if facility == 'SNS':
        facility_file_format = '%s_%d'
    else:
        facility_file_format = '%s%d'

    sam_scans = ','.join(
        [facility_file_format % (instr, num) for num in sample['Runs']])
    container_scans = ','.join([
        facility_file_format % (instr, num)
        for num in sample['Background']["Runs"]
    ])
    container_bg = None
    if "Background" in sample['Background']:
        sample['Background']['Background']['Runs'] = expand_ints(
            sample['Background']['Background']['Runs'])
        container_bg = ','.join([
            facility_file_format % (instr, num)
            for num in sample['Background']['Background']['Runs']
        ])
        if len(container_bg) == 0:
            container_bg = None

    van['Runs'] = expand_ints(van['Runs'])
    van_scans = ','.join(
        [facility_file_format % (instr, num) for num in van['Runs']])

    van_bg_scans = None
    if 'Background' in van:
        van_bg_scans = van['Background']['Runs']
        van_bg_scans = expand_ints(van_bg_scans)
        van_bg_scans = ','.join(
            [facility_file_format % (instr, num) for num in van_bg_scans])

    # Override Nexus file basename with Filenames if present
    if "Filenames" in sample:
        sam_scans = ','.join(sample["Filenames"])
    if "Filenames" in sample['Background']:
        container_scans = ','.join(sample['Background']["Filenames"])
    if "Background" in sample['Background']:
        if "Filenames" in sample['Background']['Background']:
            container_bg = ','.join(
                sample['Background']['Background']['Filenames'])
    if "Filenames" in van:
        van_scans = ','.join(van["Filenames"])
    if "Background" in van:
        if "Filenames" in van['Background']:
            van_bg_scans = ','.join(van['Background']["Filenames"])

    # Output nexus filename
    nexus_filename = title + '.nxs'
    try:
        os.remove(nexus_filename)
    except OSError:
        pass

    # Get sample corrections
    sam_abs_corr = sample.get("AbsorptionCorrection", None)
    sam_ms_corr = sample.get("MultipleScatteringCorrection", None)
    sam_inelastic_corr = SetInelasticCorrection(
        sample.get('InelasticCorrection', None))

    # Warn about having absorption correction and multiple scat correction set
    if sam_abs_corr and sam_ms_corr:
        log.warning(MS_AND_ABS_CORR_WARNING)

    # Compute the absorption correction on the sample if it was provided
    sam_abs_ws = ''
    con_abs_ws = ''
    if sam_abs_corr:
        msg = "Applying '{}' absorption correction to sample"
        log.notice(msg.format(sam_abs_corr["Type"]))
        sam_abs_ws, con_abs_ws = create_absorption_wksp(
            sam_scans, sam_abs_corr["Type"], sam_geo_dict, sam_mat_dict,
            sam_env_dict, **config)

    # Get vanadium corrections
    van_mass_density = van.get('MassDensity', van_mass_density)
    van_packing_fraction = van.get('PackingFraction', van_packing_fraction)
    van_abs_corr = van.get("AbsorptionCorrection", {"Type": None})
    van_ms_corr = van.get("MultipleScatteringCorrection", {"Type": None})
    van_inelastic_corr = SetInelasticCorrection(
        van.get('InelasticCorrection', None))

    # Warn about having absorption correction and multiple scat correction set
    if van_abs_corr["Type"] and van_ms_corr["Type"]:
        log.warning(MS_AND_ABS_CORR_WARNING)

    # Compute the absorption correction for the vanadium if provided
    van_abs_corr_ws = ''
    if van_abs_corr:
        msg = "Applying '{}' absorption correction to vanadium"
        log.notice(msg.format(van_abs_corr["Type"]))
        van_abs_corr_ws, van_con_ws = create_absorption_wksp(
            van_scans, van_abs_corr["Type"], van_geo_dict, van_mat_dict,
            **config)

    alignAndFocusArgs = dict()
    alignAndFocusArgs['CalFilename'] = config['Calibration']['Filename']
    # alignAndFocusArgs['GroupFilename'] don't use
    # alignAndFocusArgs['Params'] = "0.,0.02,40."
    alignAndFocusArgs['ResampleX'] = -6000
    alignAndFocusArgs['Dspacing'] = False
    alignAndFocusArgs['PreserveEvents'] = False
    alignAndFocusArgs['MaxChunkSize'] = 8
    alignAndFocusArgs['CacheDir'] = os.path.abspath(cache_dir)

    # Get any additional AlignAndFocusArgs from JSON input
    if "AlignAndFocusArgs" in config:
        otherArgs = config["AlignAndFocusArgs"]
        alignAndFocusArgs.update(otherArgs)

    # Setup grouping
    output_grouping = False
    grp_wksp = "wksp_output_group"

    if grouping:
        if 'Initial' in grouping:
            if grouping['Initial'] and not grouping['Initial'] == u'':
                alignAndFocusArgs['GroupFilename'] = grouping['Initial']
        if 'Output' in grouping:
            if grouping['Output'] and not grouping['Output'] == u'':
                output_grouping = True
                LoadDetectorsGroupingFile(InputFile=grouping['Output'],
                                          OutputWorkspace=grp_wksp)
    # If no output grouping specified, create it with Calibration Grouping
    if not output_grouping:
        LoadDiffCal(alignAndFocusArgs['CalFilename'],
                    InstrumentName=instr,
                    WorkspaceName=grp_wksp.replace('_group', ''),
                    MakeGroupingWorkspace=True,
                    MakeCalWorkspace=False,
                    MakeMaskWorkspace=False)

    # Setup the 6 bank method if no grouping specified
    if not grouping:
        CreateGroupingWorkspace(InstrumentName=instr,
                                GroupDetectorsBy='Group',
                                OutputWorkspace=grp_wksp)
        alignAndFocusArgs['GroupingWorkspace'] = grp_wksp

    # TODO take out the RecalculatePCharge in the future once tested
    # Load Sample
    print("#-----------------------------------#")
    print("# Sample")
    print("#-----------------------------------#")
    sam_wksp = load('sample', sam_scans, sam_geometry, sam_material,
                    sam_mass_density, sam_abs_ws, **alignAndFocusArgs)
    sample_title = "sample_and_container"
    save_banks(InputWorkspace=sam_wksp,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    sam_molecular_mass = mtd[sam_wksp].sample().getMaterial(
    ).relativeMolecularMass()
    natoms = getNumberAtoms(sam_packing_fraction,
                            sam_mass_density,
                            sam_molecular_mass,
                            Geometry=sam_geometry)

    # Load Sample Container
    print("#-----------------------------------#")
    print("# Sample Container")
    print("#-----------------------------------#")
    container = load('container',
                     container_scans,
                     absorption_wksp=con_abs_ws,
                     **alignAndFocusArgs)
    save_banks(InputWorkspace=container,
               Filename=nexus_filename,
               Title=container,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Load Sample Container Background

    if container_bg is not None:
        print("#-----------------------------------#")
        print("# Sample Container's Background")
        print("#-----------------------------------#")
        container_bg = load('container_background', container_bg,
                            **alignAndFocusArgs)
        save_banks(InputWorkspace=container_bg,
                   Filename=nexus_filename,
                   Title=container_bg,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # Load Vanadium

    print("#-----------------------------------#")
    print("# Vanadium")
    print("#-----------------------------------#")
    van_wksp = load('vanadium', van_scans, van_geometry, van_material,
                    van_mass_density, van_abs_corr_ws, **alignAndFocusArgs)
    vanadium_title = "vanadium_and_background"

    save_banks(InputWorkspace=van_wksp,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    van_material = mtd[van_wksp].sample().getMaterial()
    van_molecular_mass = van_material.relativeMolecularMass()
    nvan_atoms = getNumberAtoms(1.0,
                                van_mass_density,
                                van_molecular_mass,
                                Geometry=van_geometry)

    print("Sample natoms:", natoms)
    print("Vanadium natoms:", nvan_atoms)
    print("Vanadium natoms / Sample natoms:", nvan_atoms / natoms)

    # Load Vanadium Background
    van_bg = None
    if van_bg_scans is not None:
        print("#-----------------------------------#")
        print("# Vanadium Background")
        print("#-----------------------------------#")
        van_bg = load('vanadium_background', van_bg_scans, **alignAndFocusArgs)
        vanadium_bg_title = "vanadium_background"
        save_banks(InputWorkspace=van_bg,
                   Filename=nexus_filename,
                   Title=vanadium_bg_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # Load Instrument Characterizations
    if characterizations:
        PDDetermineCharacterizations(
            InputWorkspace=sam_wksp,
            Characterizations='characterizations',
            ReductionProperties='__snspowderreduction')
        propMan = PropertyManagerDataService.retrieve('__snspowderreduction')
        qmax = 2. * np.pi / propMan['d_min'].value
        qmin = 2. * np.pi / propMan['d_max'].value
        for a, b in zip(qmin, qmax):
            print('Qrange:', a, b)
        # TODO: Add when we apply Qmin, Qmax cropping
        # mask_info = generate_cropping_table(qmin, qmax)

    # STEP 1: Subtract Backgrounds

    sam_raw = 'sam_raw'
    CloneWorkspace(InputWorkspace=sam_wksp,
                   OutputWorkspace=sam_raw)  # for later

    container_raw = 'container_raw'
    CloneWorkspace(InputWorkspace=container,
                   OutputWorkspace=container_raw)  # for later

    if van_bg is not None:
        RebinToWorkspace(WorkspaceToRebin=van_bg,
                         WorkspaceToMatch=van_wksp,
                         OutputWorkspace=van_bg)
        Minus(LHSWorkspace=van_wksp,
              RHSWorkspace=van_bg,
              OutputWorkspace=van_wksp)

    RebinToWorkspace(WorkspaceToRebin=container,
                     WorkspaceToMatch=sam_wksp,
                     OutputWorkspace=container)
    Minus(LHSWorkspace=sam_wksp,
          RHSWorkspace=container,
          OutputWorkspace=sam_wksp)

    if container_bg is not None:
        RebinToWorkspace(WorkspaceToRebin=container_bg,
                         WorkspaceToMatch=container,
                         OutputWorkspace=container_bg)
        Minus(LHSWorkspace=container,
              RHSWorkspace=container_bg,
              OutputWorkspace=container)

    for wksp in [container, van_wksp, sam_wksp]:
        ConvertUnits(InputWorkspace=wksp,
                     OutputWorkspace=wksp,
                     Target="MomentumTransfer",
                     EMode="Elastic")
    container_title = "container_minus_back"
    vanadium_title = "vanadium_minus_back"
    sample_title = "sample_minus_back"
    save_banks(InputWorkspace=container,
               Filename=nexus_filename,
               Title=container_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)
    save_banks(InputWorkspace=van_wksp,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)
    save_banks(InputWorkspace=sam_wksp,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # STEP 2.0: Prepare vanadium as normalization calibrant

    # Multiple-Scattering and Absorption (Steps 2-4) for Vanadium

    van_corrected = 'van_corrected'
    ConvertUnits(InputWorkspace=van_wksp,
                 OutputWorkspace=van_corrected,
                 Target="Wavelength",
                 EMode="Elastic")

    if "Type" in van_abs_corr:
        if van_abs_corr['Type'] == 'Carpenter' \
                or van_ms_corr['Type'] == 'Carpenter':
            CarpenterSampleCorrection(
                InputWorkspace=van_corrected,
                OutputWorkspace=van_corrected,
                CylinderSampleRadius=van['Geometry']['Radius'])
        elif van_abs_corr['Type'] == 'Mayers' \
                or van_ms_corr['Type'] == 'Mayers':
            if van_ms_corr['Type'] == 'Mayers':
                MayersSampleCorrection(InputWorkspace=van_corrected,
                                       OutputWorkspace=van_corrected,
                                       MultipleScattering=True)
            else:
                MayersSampleCorrection(InputWorkspace=van_corrected,
                                       OutputWorkspace=van_corrected,
                                       MultipleScattering=False)
        else:
            print("NO VANADIUM absorption or multiple scattering!")
    else:
        CloneWorkspace(InputWorkspace=van_corrected,
                       OutputWorkspace=van_corrected)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')
    vanadium_title += "_ms_abs_corrected"
    save_banks(InputWorkspace=van_corrected,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)
    save_banks(InputWorkspace=van_corrected,
               Filename=nexus_filename,
               Title=vanadium_title + "_with_peaks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # TODO subtract self-scattering of vanadium (According to Eq. 7 of Howe,
    # McGreevey, and Howells, JPCM, 1989)

    # Smooth Vanadium (strip peaks plus smooth)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='dSpacing',
                 EMode='Elastic')

    # After StripVanadiumPeaks, the workspace goes from EventWorkspace ->
    # Workspace2D
    StripVanadiumPeaks(InputWorkspace=van_corrected,
                       OutputWorkspace=van_corrected,
                       BackgroundType='Quadratic')
    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')
    vanadium_title += '_peaks_stripped'
    save_banks(InputWorkspace=van_corrected,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='TOF',
                 EMode='Elastic')

    FFTSmooth(InputWorkspace=van_corrected,
              OutputWorkspace=van_corrected,
              Filter="Butterworth",
              Params='20,2',
              IgnoreXBins=True,
              AllSpectra=True)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')

    vanadium_title += '_smoothed'
    save_banks(InputWorkspace=van_corrected,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Inelastic correction
    if van_inelastic_corr['Type'] == "Placzek":
        van_scan = van['Runs'][0]
        van_incident_wksp = 'van_incident_wksp'
        van_inelastic_opts = van['InelasticCorrection']
        lambda_binning_fit = van_inelastic_opts['LambdaBinningForFit']
        lambda_binning_calc = van_inelastic_opts['LambdaBinningForCalc']
        print('van_scan:', van_scan)
        GetIncidentSpectrumFromMonitor(Filename=facility_file_format %
                                       (instr, van_scan),
                                       OutputWorkspace=van_incident_wksp)

        fit_type = van['InelasticCorrection']['FitSpectrumWith']
        FitIncidentSpectrum(InputWorkspace=van_incident_wksp,
                            OutputWorkspace=van_incident_wksp,
                            FitSpectrumWith=fit_type,
                            BinningForFit=lambda_binning_fit,
                            BinningForCalc=lambda_binning_calc,
                            PlotDiagnostics=False)

        van_placzek = 'van_placzek'

        SetSample(InputWorkspace=van_incident_wksp,
                  Material={
                      'ChemicalFormula': str(van_material),
                      'SampleMassDensity': str(van_mass_density)
                  })

        CalculatePlaczekSelfScattering(IncidentWorkspace=van_incident_wksp,
                                       ParentWorkspace=van_corrected,
                                       OutputWorkspace=van_placzek,
                                       L1=19.5,
                                       L2=alignAndFocusArgs['L2'],
                                       Polar=alignAndFocusArgs['Polar'])

        ConvertToHistogram(InputWorkspace=van_placzek,
                           OutputWorkspace=van_placzek)

        # Save before rebin in Q
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

            Rebin(InputWorkspace=wksp,
                  OutputWorkspace=wksp,
                  Params=binning,
                  PreserveEvents=True)

        save_banks(InputWorkspace=van_placzek,
                   Filename=nexus_filename,
                   Title="vanadium_placzek",
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

        # Rebin in Wavelength
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='Wavelength',
                         EMode='Elastic')
            Rebin(InputWorkspace=wksp,
                  OutputWorkspace=wksp,
                  Params=lambda_binning_calc,
                  PreserveEvents=True)

        # Save after rebin in Q
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

        # Subtract correction in Wavelength
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='Wavelength',
                         EMode='Elastic')
            if not mtd[wksp].isDistribution():
                ConvertToDistribution(wksp)

        Minus(LHSWorkspace=van_corrected,
              RHSWorkspace=van_placzek,
              OutputWorkspace=van_corrected)

        # Save after subtraction
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

        vanadium_title += '_placzek_corrected'
        save_banks(InputWorkspace=van_corrected,
                   Filename=nexus_filename,
                   Title=vanadium_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')

    SetUncertainties(InputWorkspace=van_corrected,
                     OutputWorkspace=van_corrected,
                     SetError='zero')

    # STEP 2.1: Normalize by Vanadium

    wksp_list = [sam_wksp, sam_raw, van_corrected]
    for name in wksp_list:
        ConvertUnits(InputWorkspace=name,
                     OutputWorkspace=name,
                     Target='MomentumTransfer',
                     EMode='Elastic',
                     ConvertFromPointData=False)

        Rebin(InputWorkspace=name,
              OutputWorkspace=name,
              Params=binning,
              PreserveEvents=True)

    # Save the sample - back / normalized
    Divide(LHSWorkspace=sam_wksp,
           RHSWorkspace=van_corrected,
           OutputWorkspace=sam_wksp)

    sample_title += "_normalized"
    save_banks(InputWorkspace=sam_wksp,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Save the sample / normalized (ie no background subtraction)
    Divide(LHSWorkspace=sam_raw,
           RHSWorkspace=van_corrected,
           OutputWorkspace=sam_raw)

    save_banks(InputWorkspace=sam_raw,
               Filename=nexus_filename,
               Title="sample_normalized",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Output an initial I(Q) for sample
    iq_filename = title + '_initial_iofq_banks.nxs'
    save_banks(InputWorkspace=sam_wksp,
               Filename=iq_filename,
               Title="IQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    wksp_list = [container, container_raw, van_corrected]
    if container_bg is not None:
        wksp_list.append(container_bg)
    if van_bg is not None:
        wksp_list.append(van_bg)

    for name in wksp_list:
        ConvertUnits(InputWorkspace=name,
                     OutputWorkspace=name,
                     Target='MomentumTransfer',
                     EMode='Elastic',
                     ConvertFromPointData=False)

        Rebin(InputWorkspace=name,
              OutputWorkspace=name,
              Params=binning,
              PreserveEvents=True)

    # Save the container - container_background / normalized
    Divide(LHSWorkspace=container,
           RHSWorkspace=van_corrected,
           OutputWorkspace=container)

    container_title += '_normalized'
    save_banks(InputWorkspace=container,
               Filename=nexus_filename,
               Title=container_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Save the container / normalized (ie no background subtraction)
    Divide(LHSWorkspace=container_raw,
           RHSWorkspace=van_corrected,
           OutputWorkspace=container_raw)

    save_banks(InputWorkspace=container_raw,
               Filename=nexus_filename,
               Title="container_normalized",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Save the container_background / normalized
    if container_bg is not None:
        Divide(LHSWorkspace=container_bg,
               RHSWorkspace=van_corrected,
               OutputWorkspace=container_bg)

        container_bg_title = "container_back_normalized"
        save_banks(InputWorkspace=container_bg,
                   Filename=nexus_filename,
                   Title=container_bg_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # Save the vanadium_background / normalized
    if van_bg is not None:
        Divide(LHSWorkspace=van_bg,
               RHSWorkspace=van_corrected,
               OutputWorkspace=van_bg)

        vanadium_bg_title += "_normalized"
        save_banks(InputWorkspace=van_bg,
                   Filename=nexus_filename,
                   Title=vanadium_bg_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # STEP 3 & 4: Subtract multiple scattering and apply absorption correction

    ConvertUnits(InputWorkspace=sam_wksp,
                 OutputWorkspace=sam_wksp,
                 Target="Wavelength",
                 EMode="Elastic")

    sam_corrected = 'sam_corrected'
    if sam_abs_corr and sam_ms_corr:
        if sam_abs_corr['Type'] == 'Carpenter' \
                or sam_ms_corr['Type'] == 'Carpenter':
            CarpenterSampleCorrection(
                InputWorkspace=sam_wksp,
                OutputWorkspace=sam_corrected,
                CylinderSampleRadius=sample['Geometry']['Radius'])
        elif sam_abs_corr['Type'] == 'Mayers' \
                or sam_ms_corr['Type'] == 'Mayers':
            if sam_ms_corr['Type'] == 'Mayers':
                MayersSampleCorrection(InputWorkspace=sam_wksp,
                                       OutputWorkspace=sam_corrected,
                                       MultipleScattering=True)
            else:
                MayersSampleCorrection(InputWorkspace=sam_wksp,
                                       OutputWorkspace=sam_corrected,
                                       MultipleScattering=False)
        else:
            print("NO SAMPLE absorption or multiple scattering!")
            CloneWorkspace(InputWorkspace=sam_wksp,
                           OutputWorkspace=sam_corrected)

        ConvertUnits(InputWorkspace=sam_corrected,
                     OutputWorkspace=sam_corrected,
                     Target='MomentumTransfer',
                     EMode='Elastic')

        sample_title += "_ms_abs_corrected"
        save_banks(InputWorkspace=sam_corrected,
                   Filename=nexus_filename,
                   Title=sample_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)
    else:
        CloneWorkspace(InputWorkspace=sam_wksp, OutputWorkspace=sam_corrected)

    # STEP 5: Divide by number of atoms in sample

    mtd[sam_corrected] = (nvan_atoms / natoms) * mtd[sam_corrected]
    ConvertUnits(InputWorkspace=sam_corrected,
                 OutputWorkspace=sam_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')

    sample_title += "_norm_by_atoms"
    save_banks(InputWorkspace=sam_corrected,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # STEP 6: Divide by total scattering length squared = total scattering
    # cross-section over 4 * pi
    van_material = mtd[van_corrected].sample().getMaterial()
    sigma_v = van_material.totalScatterXSection()
    prefactor = (sigma_v / (4. * np.pi))
    msg = "Total scattering cross-section of Vanadium:{} sigma_v / 4*pi: {}"
    print(msg.format(sigma_v, prefactor))

    mtd[sam_corrected] = prefactor * mtd[sam_corrected]
    sample_title += '_multiply_by_vanSelfScat'
    save_banks(InputWorkspace=sam_corrected,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # STEP 7: Inelastic correction
    ConvertUnits(InputWorkspace=sam_corrected,
                 OutputWorkspace=sam_corrected,
                 Target='Wavelength',
                 EMode='Elastic')

    if sam_inelastic_corr['Type'] == "Placzek":
        if sam_material is None:
            error = "For Placzek correction, must specifiy a sample material."
            raise Exception(error)
        for sam_scan in sample['Runs']:
            sam_incident_wksp = 'sam_incident_wksp'
            sam_inelastic_opts = sample['InelasticCorrection']
            lambda_binning_fit = sam_inelastic_opts['LambdaBinningForFit']
            lambda_binning_calc = sam_inelastic_opts['LambdaBinningForCalc']
            GetIncidentSpectrumFromMonitor(Filename=facility_file_format %
                                           (instr, sam_scan),
                                           OutputWorkspace=sam_incident_wksp)

            fit_type = sample['InelasticCorrection']['FitSpectrumWith']
            FitIncidentSpectrum(InputWorkspace=sam_incident_wksp,
                                OutputWorkspace=sam_incident_wksp,
                                FitSpectrumWith=fit_type,
                                BinningForFit=lambda_binning_fit,
                                BinningForCalc=lambda_binning_calc)

            sam_placzek = 'sam_placzek'
            SetSample(InputWorkspace=sam_incident_wksp,
                      Material={
                          'ChemicalFormula': str(sam_material),
                          'SampleMassDensity': str(sam_mass_density)
                      })
            CalculatePlaczekSelfScattering(IncidentWorkspace=sam_incident_wksp,
                                           ParentWorkspace=sam_corrected,
                                           OutputWorkspace=sam_placzek,
                                           L1=19.5,
                                           L2=alignAndFocusArgs['L2'],
                                           Polar=alignAndFocusArgs['Polar'])

            ConvertToHistogram(InputWorkspace=sam_placzek,
                               OutputWorkspace=sam_placzek)

        # Save before rebin in Q
        for wksp in [sam_placzek, sam_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

            Rebin(InputWorkspace=wksp,
                  OutputWorkspace=wksp,
                  Params=binning,
                  PreserveEvents=True)

        save_banks(InputWorkspace=sam_placzek,
                   Filename=nexus_filename,
                   Title="sample_placzek",
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

        # Save after rebin in Q
        for wksp in [sam_placzek, sam_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

        Minus(LHSWorkspace=sam_corrected,
              RHSWorkspace=sam_placzek,
              OutputWorkspace=sam_corrected)

        # Save after subtraction
        for wksp in [sam_placzek, sam_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

        sample_title += '_placzek_corrected'
        save_banks(InputWorkspace=sam_corrected,
                   Filename=nexus_filename,
                   Title=sample_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # STEP 7: Output spectrum

    # TODO Since we already went from Event -> 2D workspace, can't use this
    # anymore
    print('sam:', mtd[sam_corrected].id())
    print('van:', mtd[van_corrected].id())
    if alignAndFocusArgs['PreserveEvents']:
        CompressEvents(InputWorkspace=sam_corrected,
                       OutputWorkspace=sam_corrected)

    # F(Q) bank-by-bank Section
    fq_banks_wksp = "FQ_banks_wksp"
    CloneWorkspace(InputWorkspace=sam_corrected, OutputWorkspace=fq_banks_wksp)
    # TODO: Add the following when implemented - FQ_banks = 'FQ_banks'

    # S(Q) bank-by-bank Section
    material = mtd[sam_corrected].sample().getMaterial()
    if material.name() is None or len(material.name().strip()) == 0:
        raise RuntimeError('Sample material was not set')
    bcoh_avg_sqrd = material.cohScatterLength() * material.cohScatterLength()
    btot_sqrd_avg = material.totalScatterLengthSqrd()
    laue_monotonic_diffuse_scat = btot_sqrd_avg / bcoh_avg_sqrd
    sq_banks_wksp = 'SQ_banks_wksp'
    CloneWorkspace(InputWorkspace=sam_corrected, OutputWorkspace=sq_banks_wksp)

    # TODO: Add the following when implemented
    '''
    SQ_banks = (1. / bcoh_avg_sqrd) * \
        mtd[sq_banks_wksp] - laue_monotonic_diffuse_scat + 1.
    '''

    # Save S(Q) and F(Q) to diagnostics NeXus file
    save_banks(InputWorkspace=fq_banks_wksp,
               Filename=nexus_filename,
               Title="FQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    save_banks(InputWorkspace=sq_banks_wksp,
               Filename=nexus_filename,
               Title="SQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Output a main S(Q) and F(Q) file
    fq_filename = title + '_fofq_banks_corrected.nxs'
    save_banks(InputWorkspace=fq_banks_wksp,
               Filename=fq_filename,
               Title="FQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    sq_filename = title + '_sofq_banks_corrected.nxs'
    save_banks(InputWorkspace=sq_banks_wksp,
               Filename=sq_filename,
               Title="SQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Print log information
    print("<b>^2:", bcoh_avg_sqrd)
    print("<b^2>:", btot_sqrd_avg)
    print("Laue term:", laue_monotonic_diffuse_scat)
    print("sample total xsection:",
          mtd[sam_corrected].sample().getMaterial().totalScatterXSection())
    print("vanadium total xsection:",
          mtd[van_corrected].sample().getMaterial().totalScatterXSection())

    # Output Bragg Diffraction
    ConvertUnits(InputWorkspace=sam_corrected,
                 OutputWorkspace=sam_corrected,
                 Target="TOF",
                 EMode="Elastic")

    ConvertToHistogram(InputWorkspace=sam_corrected,
                       OutputWorkspace=sam_corrected)

    xmin, xmax = get_each_spectra_xmin_xmax(mtd[sam_corrected])

    CropWorkspaceRagged(InputWorkspace=sam_corrected,
                        OutputWorkspace=sam_corrected,
                        Xmin=xmin,
                        Xmax=xmax)

    xmin_rebin = min(xmin)
    xmax_rebin = max(xmax)
    tof_binning = "{xmin},-0.01,{xmax}".format(xmin=xmin_rebin,
                                               xmax=xmax_rebin)

    Rebin(InputWorkspace=sam_corrected,
          OutputWorkspace=sam_corrected,
          Params=tof_binning)

    SaveGSS(InputWorkspace=sam_corrected,
            Filename=os.path.join(os.path.abspath(OutputDir), title + ".gsa"),
            SplitFiles=False,
            Append=False,
            MultiplyByBinWidth=True,
            Format="SLOG",
            ExtendedHeader=True)

    return mtd[sam_corrected]
示例#21
0
    def _sumForegroundInLambda(self, ws):
        """Sum the foreground region of *ws* into a single histogram.

        The beam-centre spectrum is extracted first; every other foreground
        spectrum is rebinned to it and added on top, with uncertainties
        combined in quadrature.  Afterwards the single-histogram detector is
        moved (and rotated) so that it sits at the fractional line position
        recorded in the sample logs.

        :param ws: workspace whose foreground region should be summed.
        :return: a single-histogram workspace holding the summed foreground.
        """
        # foreground holds (first index, beam-centre index, last index).
        foreground = self._foregroundIndices(ws)
        sumIndices = [i for i in range(foreground[0], foreground[2] + 1)]
        beamPosIndex = foreground[1]
        foregroundWSName = self._names.withSuffix('grouped')
        foregroundWS = ExtractSingleSpectrum(InputWorkspace=ws,
                                             OutputWorkspace=foregroundWSName,
                                             WorkspaceIndex=beamPosIndex,
                                             EnableLogging=self._subalgLogging)
        maxIndex = ws.getNumberHistograms() - 1
        foregroundYs = foregroundWS.dataY(0)
        foregroundEs = foregroundWS.dataE(0)
        # Work with squared errors while accumulating; sqrt is taken at the
        # end to get back to standard deviations.
        numpy.square(foregroundEs, out=foregroundEs)
        for i in sumIndices:
            if i == beamPosIndex:
                # Beam-centre spectrum is already in foregroundWS.
                continue
            if i < 0 or i > maxIndex:
                self.log().warning(
                    'Foreground partially out of the workspace.')
                # Skip the invalid index: ExtractSingleSpectrum would
                # otherwise fail on an out-of-range WorkspaceIndex.
                continue
            addeeWSName = self._names.withSuffix('addee')
            addeeWS = ExtractSingleSpectrum(InputWorkspace=ws,
                                            OutputWorkspace=addeeWSName,
                                            WorkspaceIndex=i,
                                            EnableLogging=self._subalgLogging)
            addeeWS = RebinToWorkspace(WorkspaceToRebin=addeeWS,
                                       WorkspaceToMatch=foregroundWS,
                                       OutputWorkspace=addeeWSName,
                                       EnableLogging=self._subalgLogging)
            ys = addeeWS.readY(0)
            foregroundYs += ys
            es = addeeWS.readE(0)
            foregroundEs += es**2
            self._cleanup.cleanup(addeeWS)
        self._cleanup.cleanup(ws)
        numpy.sqrt(foregroundEs, out=foregroundEs)
        # Move the detector to the fractional linePosition.
        linePosition = ws.run().getProperty(
            common.SampleLogs.LINE_POSITION).value
        instr = common.instrumentName(ws)
        pixelSize = common.pixelSize(instr)
        # Offset along the detector face between the (integer) beam-centre
        # pixel and the fitted fractional line position.
        dist = pixelSize * (linePosition - beamPosIndex)

        if dist != 0.:
            detPoint1 = ws.spectrumInfo().position(0)
            detPoint2 = ws.spectrumInfo().position(20)
            # Orientation of the detector face in the x-z plane.
            # NOTE: numpy.math was removed in NumPy 2.0; use the numpy
            # ufuncs directly (identical results for scalar input).
            beta = numpy.arctan2((detPoint2[0] - detPoint1[0]),
                                 (detPoint2[2] - detPoint1[2]))
            xvsy = numpy.sin(beta) * dist
            mz = numpy.cos(beta) * dist
            if instr == 'D17':
                # D17 has a horizontal reflection plane.
                mx = xvsy
                my = 0.0
                rotationAxis = [0, 1, 0]
            else:
                mx = 0.0
                my = xvsy
                rotationAxis = [-1, 0, 0]
            MoveInstrumentComponent(Workspace=foregroundWS,
                                    ComponentName='detector',
                                    X=mx,
                                    Y=my,
                                    Z=mz,
                                    RelativePosition=True)
            # Keep the detector normal pointing at the sample after the move.
            theta = foregroundWS.spectrumInfo().twoTheta(0) / 2.
            RotateInstrumentComponent(Workspace=foregroundWS,
                                      ComponentName='detector',
                                      X=rotationAxis[0],
                                      Y=rotationAxis[1],
                                      Z=rotationAxis[2],
                                      Angle=theta,
                                      RelativeRotation=True)
        return foregroundWS
Example #22
0
    def __processFile(self, filename, file_prog_start,
                      determineCharacterizations,
                      createUnfocused):  # noqa: C902,C901
        """
        Load, reduce and focus a single data file, chunk by chunk.

        The file is split into chunks (``determineChunking``); each chunk is
        loaded, optionally filtered for bad pulses, divided by the absorption
        correction workspace, run through ``AlignAndFocusPowder`` and then
        accumulated into a single output workspace. If caching is enabled the
        reduced result is loaded from / saved to a cache file instead of being
        recomputed.

        Parameters
        ----------
        filename : str
            Path of the data file to process.
        file_prog_start : float
            Progress-bar fraction at which processing of this file starts.
        determineCharacterizations : bool
            If True, derive characterization information from the first chunk
            (done once; cleared after the first chunk).
        createUnfocused : bool
            If True, also accumulate an unfocused copy of the data; this
            disables reading from the cache.

        Returns
        -------
        tuple(str, str)
            Name of the focused workspace and name of the unfocused workspace
            (empty string when ``createUnfocused`` is False or the result came
            from the cache).

        Raises
        ------
        RuntimeError
            If no chunk produced any data, or if ``FilterBadPulses`` removed
            every event from the file/chunk.
        """
        # create a unique name for the workspace
        wkspname = '__' + self.__wkspNameFromFile(filename)
        wkspname += '_f%d' % self._filenames.index(
            filename)  # add file number to be unique
        unfocusname = ''
        if createUnfocused:
            unfocusname = wkspname + '_unfocused'

        # check for a cachefilename
        cachefile = self.__getCacheName(self.__wkspNameFromFile(filename))
        self.log().information('looking for cachefile "{}"'.format(cachefile))
        # the cache only stores the focused result, so it cannot be used when
        # the unfocused workspace was requested
        if (not createUnfocused
            ) and self.useCaching and os.path.exists(cachefile):
            try:
                if self.__loadCacheFile(cachefile, wkspname):
                    return wkspname, ''
            except RuntimeError as e:
                # log as a warning and carry on as though the cache file didn't exist
                self.log().warning('Failed to load cache file "{}": {}'.format(
                    cachefile, e))
        else:
            self.log().information('not using cache')

        chunks = determineChunking(filename, self.chunkSize)
        numSteps = 6  # for better progress reporting - 6 steps per chunk
        if createUnfocused:
            numSteps = 7  # one more for accumulating the unfocused workspace
        self.log().information('Processing \'{}\' in {:d} chunks'.format(
            filename, len(chunks)))
        prog_per_chunk_step = self.prog_per_file * 1. / (numSteps *
                                                         float(len(chunks)))

        unfocusname_chunk = ''
        canSkipLoadingLogs = False

        # inner loop is over chunks
        haveAccumulationForFile = False
        for (j, chunk) in enumerate(chunks):
            prog_start = file_prog_start + float(j) * float(
                numSteps - 1) * prog_per_chunk_step

            # if reading all at once, put the data into the final name directly
            if len(chunks) == 1:
                chunkname = wkspname
                unfocusname_chunk = unfocusname
            else:
                chunkname = '{}_c{:d}'.format(wkspname, j)
                if unfocusname:  # only create unfocus chunk if needed
                    unfocusname_chunk = '{}_c{:d}'.format(unfocusname, j)

            # load a chunk - this is a bit crazy long because we need to get an output property from `Load` when it
            # is run and the algorithm history doesn't exist until the parent algorithm (this) has finished
            loader = self.__createLoader(
                filename,
                chunkname,
                skipLoadingLogs=(len(chunks) > 1 and canSkipLoadingLogs
                                 and haveAccumulationForFile),
                progstart=prog_start,
                progstop=prog_start + prog_per_chunk_step,
                **chunk)
            loader.execute()
            if j == 0:
                self.__setupCalibration(chunkname)

            # copy the necessary logs onto the workspace
            if len(chunks
                   ) > 1 and canSkipLoadingLogs and haveAccumulationForFile:
                CopyLogs(InputWorkspace=wkspname,
                         OutputWorkspace=chunkname,
                         MergeStrategy='WipeExisting')
                # re-load instrument so detector positions that depend on logs get initialized
                try:
                    LoadIDFFromNexus(Workspace=chunkname,
                                     Filename=filename,
                                     InstrumentParentPath='/entry')
                except RuntimeError as e:
                    self.log().warning(
                        'Reloading instrument using "LoadIDFFromNexus" failed: {}'
                        .format(e))

            # get the underlying loader name if we used the generic one
            if self.__loaderName == 'Load':
                self.__loaderName = loader.getPropertyValue('LoaderName')
            # only LoadEventNexus can turn off loading logs, but FilterBadPulses
            # requires them to be loaded from the file
            canSkipLoadingLogs = self.__loaderName == 'LoadEventNexus' and self.filterBadPulses <= 0. and haveAccumulationForFile

            if determineCharacterizations and j == 0:
                self.__determineCharacterizations(
                    filename, chunkname)  # updates instance variable
                determineCharacterizations = False

            if self.__loaderName == 'LoadEventNexus' and mtd[
                    chunkname].getNumberEvents() == 0:
                self.log().notice(
                    'Chunk {} of {} contained no events. Skipping to next chunk.'
                    .format(j + 1, len(chunks)))
                continue

            prog_start += prog_per_chunk_step
            if self.filterBadPulses > 0.:
                FilterBadPulses(InputWorkspace=chunkname,
                                OutputWorkspace=chunkname,
                                LowerCutoff=self.filterBadPulses,
                                startProgress=prog_start,
                                endProgress=prog_start + prog_per_chunk_step)
                if mtd[chunkname].getNumberEvents() == 0:
                    msg = 'FilterBadPulses removed all events from '
                    if len(chunks) == 1:
                        raise RuntimeError(msg + filename)
                    else:
                        # j is 0-based; report the 1-based chunk number for
                        # consistency with the "contained no events" notice above
                        raise RuntimeError(msg + 'chunk {} of {} in {}'.format(
                            j + 1, len(chunks), filename))

            prog_start += prog_per_chunk_step

            # absorption correction workspace
            if self.absorption is not None and len(str(self.absorption)) > 0:
                ConvertUnits(InputWorkspace=chunkname,
                             OutputWorkspace=chunkname,
                             Target='Wavelength',
                             EMode='Elastic')
                # rebin the absorption correction to match the binning of the inputs if in histogram mode
                # EventWorkspace will compare the wavelength of each individual event
                absWksp = self.absorption
                if mtd[chunkname].id() != 'EventWorkspace':
                    absWksp = '__absWkspRebinned'
                    RebinToWorkspace(WorkspaceToRebin=self.absorption,
                                     WorkspaceToMatch=chunkname,
                                     OutputWorkspace=absWksp)
                Divide(LHSWorkspace=chunkname,
                       RHSWorkspace=absWksp,
                       OutputWorkspace=chunkname,
                       startProgress=prog_start,
                       endProgress=prog_start + prog_per_chunk_step)
                if absWksp != self.absorption:  # clean up
                    DeleteWorkspace(Workspace=absWksp)
                ConvertUnits(InputWorkspace=chunkname,
                             OutputWorkspace=chunkname,
                             Target='TOF',
                             EMode='Elastic')
            prog_start += prog_per_chunk_step

            if self.kwargs is None:
                raise RuntimeError(
                    'Somehow arguments for "AlignAndFocusPowder" aren\'t set')

            AlignAndFocusPowder(InputWorkspace=chunkname,
                                OutputWorkspace=chunkname,
                                UnfocussedWorkspace=unfocusname_chunk,
                                startProgress=prog_start,
                                endProgress=prog_start +
                                2. * prog_per_chunk_step,
                                **self.kwargs)
            prog_start += 2. * prog_per_chunk_step  # AlignAndFocusPowder counts for two steps

            self.__accumulate(chunkname,
                              wkspname,
                              unfocusname_chunk,
                              unfocusname,
                              not haveAccumulationForFile,
                              removelogs=canSkipLoadingLogs)

            haveAccumulationForFile = True
        # end of inner loop
        if not mtd.doesExist(wkspname):
            raise RuntimeError(
                'Failed to process any data from file "{}"'.format(filename))

        # copy the sample object from the absorption workspace
        if self.absorption is not None and len(str(self.absorption)) > 0:
            CopySample(InputWorkspace=self.absorption,
                       OutputWorkspace=wkspname,
                       CopyEnvironment=False)

        # write out the cachefile for the main reduced data independent of whether
        # the unfocussed workspace was requested
        if self.useCaching and not os.path.exists(cachefile):
            self.log().information(
                'Saving data to cachefile "{}"'.format(cachefile))
            SaveNexusProcessed(InputWorkspace=wkspname, Filename=cachefile)

        return wkspname, unfocusname