Example #1
    def create_grouping_workspace_from_spectra_list(self):
        """
        Create grouping workspace for ROI defined as a list of spectrum numbers
        """
        grp_ws, _, _ = CreateGroupingWorkspace(
            InstrumentName=self.instrument,
            OutputWorkspace=GROUP_WS_NAMES[self.group])
        for spec in self.spectra_list:
            # spectrum numbers are 1-based, workspace indices 0-based
            det_ids = grp_ws.getDetectorIDs(spec - 1)
            grp_ws.setValue(det_ids[0], 1)
        self.group_ws = grp_ws
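
A grouping workspace built this way is usually handed to a focusing step. A minimal sketch, assuming a d-spacing input workspace named 'ws_dspacing' (the workspace names here are illustrative, not part of the original class):

    # sum all spectra that share a non-zero group number in grp_ws
    focused = DiffractionFocussing(InputWorkspace='ws_dspacing',
                                   GroupingWorkspace=grp_ws,
                                   OutputWorkspace='ws_focused')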
Example #2
    def create_grouping_workspace_from_calfile(self):
        """
        Create grouping workspace for ROI defined in .cal file
        """
        grp_ws, _, _ = CreateGroupingWorkspace(
            InstrumentName=self.instrument,
            OldCalFilename=self.cal_filepath,
            OutputWorkspace=GROUP_WS_NAMES[self.group])
        self.group_ws = grp_ws
Example #3
def group_pixels_2theta(vulcan_ws_name, tth_group_ws_name, start_iws, end_iws,
                        two_theta_bin_range, two_theta_step):
    """
    Group pixels of a VULCAN workspace into consecutive 2theta bins.
    Returns the 2theta bin edges, the grouping workspace and the number of
    pixels assigned to each bin (-1 where a bin received no pixels).
    """

    # create group workspace
    CreateGroupingWorkspace(InputWorkspace=vulcan_ws_name, GroupDetectorsBy='All',
                            OutputWorkspace=tth_group_ws_name)

    # Get workspace
    vulcan_ws = mantid_helper.retrieve_workspace(vulcan_ws_name, True)
    group_ws = mantid_helper.retrieve_workspace(tth_group_ws_name, True)

    # Build the 2theta bin edges and a per-bin pixel counter
    two_theta_array = numpy.arange(two_theta_bin_range[0], two_theta_bin_range[1] + two_theta_step,
                                   two_theta_step, dtype='float')
    num_2theta = two_theta_array.shape[0]
    num_pixels_array = numpy.zeros(shape=two_theta_array.shape, dtype='int')

    # source and sample position
    source = vulcan_ws.getInstrument().getSource().getPos()
    sample = vulcan_ws.getInstrument().getSample().getPos()
    # Calculate 2theta for each detector and assign it a group number
    for iws in range(0, vulcan_ws.getNumberHistograms()):
        if iws < start_iws or iws >= end_iws:
            # set to group 0 to ignore
            group_ws.dataY(iws)[0] = 0

        else:
            # interested
            det_i = vulcan_ws.getDetector(iws).getPos()
            two_theta_i = (det_i - sample).angle(sample - source) * 180. / numpy.pi
            if two_theta_i < two_theta_array[0] or two_theta_i >= two_theta_array[-1]:
                group_ws.dataY(iws)[0] = 0
            elif two_theta_i == two_theta_array[0]:
                group_ws.dataY(iws)[0] = 1
                num_pixels_array[0] += 1
            else:
                i_2theta = numpy.searchsorted(two_theta_array, [two_theta_i])[0]
                if i_2theta <= 0 or i_2theta >= num_2theta:
                    raise RuntimeError('Programming error!')
                group_ws.dataY(iws)[0] = i_2theta
                num_pixels_array[i_2theta-1] += 1
        # END-IF-ELSE
    # END-FOR

    # flag 2theta bins that received no pixels
    num_pixels_array[numpy.where(num_pixels_array < 0.1)] = -1

    return two_theta_array, group_ws, num_pixels_array
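
A hedged sketch of how this helper might be called; the workspace name, index range, and binning below are assumptions for illustration:

    # bin workspace indices 0..6467 of a loaded VULCAN run into
    # 0.1-degree 2theta groups between 60 and 120 degrees
    tth_edges, tth_groups, pixels_per_bin = group_pixels_2theta(
        'VULCAN_12345', 'vulcan_tth_group', start_iws=0, end_iws=6468,
        two_theta_bin_range=(60., 120.), two_theta_step=0.1)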
Example #4
    def _generateGrouping(self, runnumber, metaWS, progress):
        group_to_real = {'Banks': 'Group', 'Modules': 'bank', '2_4 Grouping': '2_4Grouping'}
        group = self.getProperty('GroupDetectorsBy').value
        real_name = group_to_real.get(group, group)

        if not mtd.doesExist(group):
            if group == '2_4 Grouping':
                group = '2_4_Grouping'

            if not metaWS:
                metaWS = self._loadMetaWS(runnumber)
            CreateGroupingWorkspace(InputWorkspace=metaWS, GroupDetectorsBy=real_name,
                                    OutputWorkspace=group)
            progress.report('create grouping')
        else:
            progress.report()

        return group
Example #5
def create_test_ws_and_group():
    myFunc = "name=Gaussian, PeakCentre=2, Height=100, Sigma=0.01;" + \
        "name=Gaussian, PeakCentre=1, Height=100, Sigma=0.01;" + \
        "name=Gaussian, PeakCentre=4, Height=100, Sigma=0.01"
    ws = CreateSampleWorkspace("Event",
                               "User Defined",
                               myFunc,
                               BankPixelWidth=1,
                               XUnit='dSpacing',
                               XMax=5,
                               BinWidth=0.001,
                               NumEvents=100000,
                               NumBanks=8)
    for n in range(1, 5):
        MoveInstrumentComponent(ws,
                                ComponentName=f'bank{n}',
                                X=1 + n / 10,
                                Y=0,
                                Z=1 + n / 10,
                                RelativePosition=False)
        MoveInstrumentComponent(ws,
                                ComponentName=f'bank{n+4}',
                                X=2 + n / 10,
                                Y=0,
                                Z=2 + n / 10,
                                RelativePosition=False)

    MaskDetectors(ws, WorkspaceIndexList=[3, 7])

    ws = ScaleX(ws, Factor=1.05, IndexMin=1, IndexMax=1)
    ws = ScaleX(ws, Factor=0.95, IndexMin=2, IndexMax=2)
    ws = ScaleX(ws, Factor=1.05, IndexMin=4, IndexMax=6)
    ws = ScaleX(ws, Factor=1.02, IndexMin=5, IndexMax=5)
    ws = ScaleX(ws, Factor=0.98, IndexMin=6, IndexMax=6)
    ws = Rebin(ws, '0,0.001,5')
    ws = ConvertUnits(ws, Target='TOF')

    groups, _, _ = CreateGroupingWorkspace(InputWorkspace=ws,
                                           ComponentName='basic_rect',
                                           CustomGroupingString='1-4,5-8')

    return ws, groups
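
The returned pair can be consumed directly by a grouping step. A minimal sketch, assuming GroupDetectors accepts the grouping via its CopyGroupingFromWorkspace property (the output name is illustrative):

    ws, groups = create_test_ws_and_group()
    # collapse the eight spectra into the two groups ('1-4' and '5-8') defined above
    grouped = GroupDetectors(InputWorkspace=ws,
                             CopyGroupingFromWorkspace=groups,
                             OutputWorkspace='grouped')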
Example #6
    def runTest(self):
        # 11MB file, process in 4 chunks
        kwargs = {
            'RunNumbers': '45874',
            'GroupDetectorsBy': 'Banks',
            'Binning': [.5, -.004, 7],
            'MaxChunkSize': .01
        }

        # create grouping for two output spectra
        CreateGroupingWorkspace(InstrumentFilename='SNAP_Definition.xml',
                                GroupDetectorsBy='Group',
                                OutputWorkspace='SNAP_grouping')
        _assert_reduction_configuration(kwargs)
        SNAPReduce(**kwargs)
        RenameWorkspace(InputWorkspace='SNAP_45874_Banks_red',
                        OutputWorkspace='with_chunks')

        # process without chunks
        kwargs['MaxChunkSize'] = 0
        SNAPReduce(**kwargs)
        RenameWorkspace(InputWorkspace='SNAP_45874_Banks_red',
                        OutputWorkspace='no_chunks')
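
A natural follow-up, not shown in the original, is to check that the chunked and unchunked reductions agree. A hedged sketch using CompareWorkspaces, with the tolerance value an assumption:

        # the two reductions should match to within a small tolerance
        result, _ = CompareWorkspaces(Workspace1='with_chunks',
                                      Workspace2='no_chunks',
                                      Tolerance=1e-9)
        assert result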
Example #7
    def create_bank_grouping_workspace(self):
        """
        Create grouping workspace for ROI corresponding to one or more banks
        """
        ws_name = GROUP_WS_NAMES[self.group]
        grp_ws = None
        try:
            grp_ws = LoadDetectorsGroupingFile(
                InputFile=path.join(CALIB_DIR, GROUP_FILES[self.group]),
                OutputWorkspace=ws_name)
        except ValueError:
            logger.notice(
                "Grouping file not found in user directories - creating one")
            if self.group.banks and self.group != GROUP.TEXTURE20 and self.group != GROUP.TEXTURE30:
                grp_ws, _, _ = CreateGroupingWorkspace(
                    InstrumentName=self.instrument,
                    OutputWorkspace=ws_name,
                    GroupNames=GROUP_BANK_ARGS[self.group])
        if grp_ws:
            self.group_ws = grp_ws
        else:
            raise ValueError(
                "Could not find or create grouping requested - make sure the directory of the grouping.xml"
                " files is on the path")
Example #8
    def PyExec(self):
        # Retrieve all relevant settings

        in_Runs = self.getProperty("RunNumbers").value

        maskWSname = self._getMaskWSname()

        # either type of file-based calibration is stored in the same variable
        calib = self.getProperty("Calibration").value
        if calib == "Calibration File":
            cal_File = self.getProperty("CalibrationFilename").value
        elif calib == 'DetCal File':
            cal_File = self.getProperty('DetCalFilename').value
            cal_File = ','.join(cal_File)
        else:
            cal_File = None

        params = self.getProperty("Binning").value
        norm = self.getProperty("Normalization").value

        if norm == "From Processed Nexus":
            norm_File = self.getProperty("NormalizationFilename").value
            LoadNexusProcessed(Filename=norm_File, OutputWorkspace='normWS')
            normWS = 'normWS'
        elif norm == "From Workspace":
            normWS = str(self.getProperty("NormalizationWorkspace").value)
        else:
            normWS = None

        group_to_real = {
            'Banks': 'Group',
            'Modules': 'bank',
            '2_4 Grouping': '2_4Grouping'
        }
        group = self.getProperty('GroupDetectorsBy').value
        real_name = group_to_real.get(group, group)

        if not mtd.doesExist(group):
            if group == '2_4 Grouping':
                group = '2_4_Grouping'
            CreateGroupingWorkspace(InstrumentName='SNAP',
                                    GroupDetectorsBy=real_name,
                                    OutputWorkspace=group)

        Process_Mode = self.getProperty("ProcessingMode").value

        prefix = self.getProperty("OptionalPrefix").value

        # --------------------------- REDUCE DATA -----------------------------

        Tag = 'SNAP'
        for r in in_Runs:
            self.log().notice("processing run %s" % r)
            self.log().information(str(self.get_IPTS_Local(r)))
            if self.getProperty("LiveData").value:
                Tag = 'Live'
                LoadPreNexusLive(Instrument='SNAP', OutputWorkspace='WS')
            else:
                Load(Filename='SNAP' + str(r), OutputWorkspace='WS')
                NormaliseByCurrent(InputWorkspace='WS', OutputWorkspace='WS')

            CompressEvents(InputWorkspace='WS', OutputWorkspace='WS')
            CropWorkspace(InputWorkspace='WS',
                          OutputWorkspace='WS',
                          XMax=50000)
            RemovePromptPulse(InputWorkspace='WS',
                              OutputWorkspace='WS',
                              Width='1600',
                              Frequency='60.4')

            if maskWSname is not None:
                MaskDetectors(Workspace='WS', MaskedWorkspace=maskWSname)

            self._alignAndFocus(params, calib, cal_File, group)

            normWS = self._generateNormalization('WS_red', norm, normWS)
            WS_nor = None
            if normWS is not None:
                WS_nor = 'WS_nor'
                Divide(LHSWorkspace='WS_red',
                       RHSWorkspace=normWS,
                       OutputWorkspace='WS_nor')
                ReplaceSpecialValues(InputWorkspace='WS_nor',
                                     OutputWorkspace='WS_nor',
                                     NaNValue='0',
                                     NaNError='0',
                                     InfinityValue='0',
                                     InfinityError='0')

            new_Tag = Tag
            if len(prefix) > 0:
                new_Tag += '_' + prefix

            # Edit instrument geometry to make final workspace smaller on disk
            det_table = PreprocessDetectorsToMD(
                InputWorkspace='WS_red', OutputWorkspace='__SNAP_det_table')
            polar = np.degrees(det_table.column('TwoTheta'))
            azi = np.degrees(det_table.column('Azimuthal'))
            EditInstrumentGeometry(Workspace='WS_red',
                                   L2=det_table.column('L2'),
                                   Polar=polar,
                                   Azimuthal=azi)
            if WS_nor is not None:
                EditInstrumentGeometry(Workspace='WS_nor',
                                       L2=det_table.column('L2'),
                                       Polar=polar,
                                       Azimuthal=azi)
            mtd.remove('__SNAP_det_table')

            # Save requested formats
            basename = '%s_%s_%s' % (new_Tag, r, group)
            self._save(r, basename, norm)

            # temporary workspace no longer needed
            DeleteWorkspace(Workspace='WS')

            # rename everything as appropriate and determine output workspace name
            RenameWorkspace(InputWorkspace='WS_d',
                            OutputWorkspace='%s_%s_d' % (new_Tag, r))
            RenameWorkspace(InputWorkspace='WS_red',
                            OutputWorkspace=basename + '_red')
            if norm == 'None':
                outputWksp = basename + '_red'
            else:
                outputWksp = basename + '_nor'
                RenameWorkspace(InputWorkspace='WS_nor',
                                OutputWorkspace=basename + '_nor')
            if norm == "Extracted from Data":
                RenameWorkspace(InputWorkspace='peak_clip_WS',
                                OutputWorkspace='%s_%s_normalizer' %
                                (new_Tag, r))

            # delete some workspaces in production
            if Process_Mode == "Production":
                DeleteWorkspace(Workspace='%s_%s_d' %
                                (new_Tag, r))  # was 'WS_d'

                if norm != "None":
                    DeleteWorkspace(Workspace=basename +
                                    '_red')  # was 'WS_red'

                if norm == "Extracted from Data":
                    DeleteWorkspace(Workspace='%s_%s_normalizer' %
                                    (new_Tag, r))  # was 'peak_clip_WS'

            propertyName = 'OutputWorkspace_' + str(outputWksp)
            self.declareProperty(
                WorkspaceProperty(propertyName, outputWksp, Direction.Output))
            self.setProperty(propertyName, outputWksp)
Example #9
def TotalScatteringReduction(config=None):
    facility = config['Facility']
    title = config['Title']
    instr = config['Instrument']

    # Get an instance to Mantid's logger
    log = Logger("TotalScatteringReduction")

    # Get sample info
    sample = get_sample(config)
    sam_mass_density = sample.get('MassDensity', None)
    sam_packing_fraction = sample.get('PackingFraction', None)
    sam_geometry = sample.get('Geometry', None)
    sam_material = sample.get('Material', None)

    sam_geo_dict = {
        'Shape': 'Cylinder',
        'Radius': config['Sample']['Geometry']['Radius'],
        'Height': config['Sample']['Geometry']['Height']
    }
    sam_mat_dict = {
        'ChemicalFormula': sam_material,
        'SampleMassDensity': sam_mass_density
    }
    if 'Environment' in config:
        sam_env_dict = {
            'Name': config['Environment']['Name'],
            'Container': config['Environment']['Container']
        }
    else:
        sam_env_dict = {'Name': 'InAir', 'Container': 'PAC06'}
    # Get normalization info
    van = get_normalization(config)
    van_mass_density = van.get('MassDensity', None)
    van_packing_fraction = van.get('PackingFraction', 1.0)
    van_geometry = van.get('Geometry', None)
    van_material = van.get('Material', 'V')

    van_geo_dict = {
        'Shape': 'Cylinder',
        'Radius': config['Normalization']['Geometry']['Radius'],
        'Height': config['Normalization']['Geometry']['Height']
    }
    van_mat_dict = {
        'ChemicalFormula': van_material,
        'SampleMassDensity': van_mass_density
    }

    # Get calibration, characterization, and other settings
    merging = config['Merging']
    binning = merging['QBinning']
    characterizations = merging.get('Characterizations', None)

    # Grouping
    grouping = merging.get('Grouping', None)
    cache_dir = config.get("CacheDir", os.path.abspath('.'))
    OutputDir = config.get("OutputDir", os.path.abspath('.'))

    # Create Nexus file basenames
    sample['Runs'] = expand_ints(sample['Runs'])
    sample['Background']['Runs'] = expand_ints(sample['Background'].get(
        'Runs', None))
    '''
    Currently not implemented:
    # wkspIndices = merging.get('SumBanks', None)
    # high_q_linear_fit_range = config['HighQLinearFitRange']

    POWGEN options not used
    #alignAndFocusArgs['RemovePromptPulseWidth'] = 50
    # alignAndFocusArgs['CompressTolerance'] use defaults
    # alignAndFocusArgs['UnwrapRef'] POWGEN option
    # alignAndFocusArgs['LowResRef'] POWGEN option
    # alignAndFocusArgs['LowResSpectrumOffset'] POWGEN option

    How much of each bank gets merged is described here by entries of the form
    # {"ID", "Qmin", "QMax"}
    # alignAndFocusArgs['CropWavelengthMin'] from characterizations file
    # alignAndFocusArgs['CropWavelengthMax'] from characterizations file
    '''

    if facility == 'SNS':
        facility_file_format = '%s_%d'
    else:
        facility_file_format = '%s%d'

    sam_scans = ','.join(
        [facility_file_format % (instr, num) for num in sample['Runs']])
    container_scans = ','.join([
        facility_file_format % (instr, num)
        for num in sample['Background']["Runs"]
    ])
    container_bg = None
    if "Background" in sample['Background']:
        sample['Background']['Background']['Runs'] = expand_ints(
            sample['Background']['Background']['Runs'])
        container_bg = ','.join([
            facility_file_format % (instr, num)
            for num in sample['Background']['Background']['Runs']
        ])
        if len(container_bg) == 0:
            container_bg = None

    van['Runs'] = expand_ints(van['Runs'])
    van_scans = ','.join(
        [facility_file_format % (instr, num) for num in van['Runs']])

    van_bg_scans = None
    if 'Background' in van:
        van_bg_scans = van['Background']['Runs']
        van_bg_scans = expand_ints(van_bg_scans)
        van_bg_scans = ','.join(
            [facility_file_format % (instr, num) for num in van_bg_scans])

    # Override Nexus file basename with Filenames if present
    if "Filenames" in sample:
        sam_scans = ','.join(sample["Filenames"])
    if "Filenames" in sample['Background']:
        container_scans = ','.join(sample['Background']["Filenames"])
    if "Background" in sample['Background']:
        if "Filenames" in sample['Background']['Background']:
            container_bg = ','.join(
                sample['Background']['Background']['Filenames'])
    if "Filenames" in van:
        van_scans = ','.join(van["Filenames"])
    if "Background" in van:
        if "Filenames" in van['Background']:
            van_bg_scans = ','.join(van['Background']["Filenames"])

    # Output nexus filename
    nexus_filename = title + '.nxs'
    try:
        os.remove(nexus_filename)
    except OSError:
        pass

    # Get sample corrections
    sam_abs_corr = sample.get("AbsorptionCorrection", None)
    sam_ms_corr = sample.get("MultipleScatteringCorrection", None)
    sam_inelastic_corr = SetInelasticCorrection(
        sample.get('InelasticCorrection', None))

    # Warn about having both absorption and multiple scattering corrections set
    if sam_abs_corr and sam_ms_corr:
        log.warning(MS_AND_ABS_CORR_WARNING)

    # Compute the absorption correction on the sample if it was provided
    sam_abs_ws = ''
    con_abs_ws = ''
    if sam_abs_corr:
        msg = "Applying '{}' absorption correction to sample"
        log.notice(msg.format(sam_abs_corr["Type"]))
        sam_abs_ws, con_abs_ws = create_absorption_wksp(
            sam_scans, sam_abs_corr["Type"], sam_geo_dict, sam_mat_dict,
            sam_env_dict, **config)

    # Get vanadium corrections
    van_mass_density = van.get('MassDensity', van_mass_density)
    van_packing_fraction = van.get('PackingFraction', van_packing_fraction)
    van_abs_corr = van.get("AbsorptionCorrection", {"Type": None})
    van_ms_corr = van.get("MultipleScatteringCorrection", {"Type": None})
    van_inelastic_corr = SetInelasticCorrection(
        van.get('InelasticCorrection', None))

    # Warn about having both absorption and multiple scattering corrections set
    if van_abs_corr["Type"] and van_ms_corr["Type"]:
        log.warning(MS_AND_ABS_CORR_WARNING)

    # Compute the absorption correction for the vanadium if provided
    van_abs_corr_ws = ''
    if van_abs_corr:
        msg = "Applying '{}' absorption correction to vanadium"
        log.notice(msg.format(van_abs_corr["Type"]))
        van_abs_corr_ws, van_con_ws = create_absorption_wksp(
            van_scans, van_abs_corr["Type"], van_geo_dict, van_mat_dict,
            **config)

    alignAndFocusArgs = dict()
    alignAndFocusArgs['CalFilename'] = config['Calibration']['Filename']
    # alignAndFocusArgs['GroupFilename'] don't use
    # alignAndFocusArgs['Params'] = "0.,0.02,40."
    alignAndFocusArgs['ResampleX'] = -6000
    alignAndFocusArgs['Dspacing'] = False
    alignAndFocusArgs['PreserveEvents'] = False
    alignAndFocusArgs['MaxChunkSize'] = 8
    alignAndFocusArgs['CacheDir'] = os.path.abspath(cache_dir)

    # Get any additional AlignAndFocusArgs from JSON input
    if "AlignAndFocusArgs" in config:
        otherArgs = config["AlignAndFocusArgs"]
        alignAndFocusArgs.update(otherArgs)

    # Setup grouping
    output_grouping = False
    grp_wksp = "wksp_output_group"

    if grouping:
        if 'Initial' in grouping:
            if grouping['Initial'] and not grouping['Initial'] == u'':
                alignAndFocusArgs['GroupFilename'] = grouping['Initial']
        if 'Output' in grouping:
            if grouping['Output'] and not grouping['Output'] == u'':
                output_grouping = True
                LoadDetectorsGroupingFile(InputFile=grouping['Output'],
                                          OutputWorkspace=grp_wksp)
    # If no output grouping specified, create it with Calibration Grouping
    if not output_grouping:
        LoadDiffCal(alignAndFocusArgs['CalFilename'],
                    InstrumentName=instr,
                    WorkspaceName=grp_wksp.replace('_group', ''),
                    MakeGroupingWorkspace=True,
                    MakeCalWorkspace=False,
                    MakeMaskWorkspace=False)

    # Setup the 6 bank method if no grouping specified
    if not grouping:
        CreateGroupingWorkspace(InstrumentName=instr,
                                GroupDetectorsBy='Group',
                                OutputWorkspace=grp_wksp)
        alignAndFocusArgs['GroupingWorkspace'] = grp_wksp

    # TODO take out the RecalculatePCharge in the future once tested
    # Load Sample
    print("#-----------------------------------#")
    print("# Sample")
    print("#-----------------------------------#")
    sam_wksp = load('sample', sam_scans, sam_geometry, sam_material,
                    sam_mass_density, sam_abs_ws, **alignAndFocusArgs)
    sample_title = "sample_and_container"
    save_banks(InputWorkspace=sam_wksp,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    sam_molecular_mass = \
        mtd[sam_wksp].sample().getMaterial().relativeMolecularMass()
    natoms = getNumberAtoms(sam_packing_fraction,
                            sam_mass_density,
                            sam_molecular_mass,
                            Geometry=sam_geometry)

    # Load Sample Container
    print("#-----------------------------------#")
    print("# Sample Container")
    print("#-----------------------------------#")
    container = load('container',
                     container_scans,
                     absorption_wksp=con_abs_ws,
                     **alignAndFocusArgs)
    save_banks(InputWorkspace=container,
               Filename=nexus_filename,
               Title=container,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Load Sample Container Background

    if container_bg is not None:
        print("#-----------------------------------#")
        print("# Sample Container's Background")
        print("#-----------------------------------#")
        container_bg = load('container_background', container_bg,
                            **alignAndFocusArgs)
        save_banks(InputWorkspace=container_bg,
                   Filename=nexus_filename,
                   Title=container_bg,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # Load Vanadium

    print("#-----------------------------------#")
    print("# Vanadium")
    print("#-----------------------------------#")
    van_wksp = load('vanadium', van_scans, van_geometry, van_material,
                    van_mass_density, van_abs_corr_ws, **alignAndFocusArgs)
    vanadium_title = "vanadium_and_background"

    save_banks(InputWorkspace=van_wksp,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    van_material = mtd[van_wksp].sample().getMaterial()
    van_molecular_mass = van_material.relativeMolecularMass()
    nvan_atoms = getNumberAtoms(1.0,
                                van_mass_density,
                                van_molecular_mass,
                                Geometry=van_geometry)

    print("Sample natoms:", natoms)
    print("Vanadium natoms:", nvan_atoms)
    print("Vanadium natoms / Sample natoms:", nvan_atoms / natoms)

    # Load Vanadium Background
    van_bg = None
    if van_bg_scans is not None:
        print("#-----------------------------------#")
        print("# Vanadium Background")
        print("#-----------------------------------#")
        van_bg = load('vanadium_background', van_bg_scans, **alignAndFocusArgs)
        vanadium_bg_title = "vanadium_background"
        save_banks(InputWorkspace=van_bg,
                   Filename=nexus_filename,
                   Title=vanadium_bg_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # Load Instrument Characterizations
    if characterizations:
        PDDetermineCharacterizations(
            InputWorkspace=sam_wksp,
            Characterizations='characterizations',
            ReductionProperties='__snspowderreduction')
        propMan = PropertyManagerDataService.retrieve('__snspowderreduction')
        qmax = 2. * np.pi / propMan['d_min'].value
        qmin = 2. * np.pi / propMan['d_max'].value
        for a, b in zip(qmin, qmax):
            print('Qrange:', a, b)
        # TODO: Add when we apply Qmin, Qmax cropping
        # mask_info = generate_cropping_table(qmin, qmax)

    # STEP 1: Subtract Backgrounds

    sam_raw = 'sam_raw'
    CloneWorkspace(InputWorkspace=sam_wksp,
                   OutputWorkspace=sam_raw)  # for later

    container_raw = 'container_raw'
    CloneWorkspace(InputWorkspace=container,
                   OutputWorkspace=container_raw)  # for later

    if van_bg is not None:
        RebinToWorkspace(WorkspaceToRebin=van_bg,
                         WorkspaceToMatch=van_wksp,
                         OutputWorkspace=van_bg)
        Minus(LHSWorkspace=van_wksp,
              RHSWorkspace=van_bg,
              OutputWorkspace=van_wksp)

    RebinToWorkspace(WorkspaceToRebin=container,
                     WorkspaceToMatch=sam_wksp,
                     OutputWorkspace=container)
    Minus(LHSWorkspace=sam_wksp,
          RHSWorkspace=container,
          OutputWorkspace=sam_wksp)

    if container_bg is not None:
        RebinToWorkspace(WorkspaceToRebin=container_bg,
                         WorkspaceToMatch=container,
                         OutputWorkspace=container_bg)
        Minus(LHSWorkspace=container,
              RHSWorkspace=container_bg,
              OutputWorkspace=container)

    for wksp in [container, van_wksp, sam_wksp]:
        ConvertUnits(InputWorkspace=wksp,
                     OutputWorkspace=wksp,
                     Target="MomentumTransfer",
                     EMode="Elastic")
    container_title = "container_minus_back"
    vanadium_title = "vanadium_minus_back"
    sample_title = "sample_minus_back"
    save_banks(InputWorkspace=container,
               Filename=nexus_filename,
               Title=container_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)
    save_banks(InputWorkspace=van_wksp,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)
    save_banks(InputWorkspace=sam_wksp,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # STEP 2.0: Prepare vanadium as normalization calibrant

    # Multiple-Scattering and Absorption (Steps 2-4) for Vanadium

    van_corrected = 'van_corrected'
    ConvertUnits(InputWorkspace=van_wksp,
                 OutputWorkspace=van_corrected,
                 Target="Wavelength",
                 EMode="Elastic")

    if "Type" in van_abs_corr:
        if van_abs_corr['Type'] == 'Carpenter' \
                or van_ms_corr['Type'] == 'Carpenter':
            CarpenterSampleCorrection(
                InputWorkspace=van_corrected,
                OutputWorkspace=van_corrected,
                CylinderSampleRadius=van['Geometry']['Radius'])
        elif van_abs_corr['Type'] == 'Mayers' \
                or van_ms_corr['Type'] == 'Mayers':
            if van_ms_corr['Type'] == 'Mayers':
                MayersSampleCorrection(InputWorkspace=van_corrected,
                                       OutputWorkspace=van_corrected,
                                       MultipleScattering=True)
            else:
                MayersSampleCorrection(InputWorkspace=van_corrected,
                                       OutputWorkspace=van_corrected,
                                       MultipleScattering=False)
        else:
            print("NO VANADIUM absorption or multiple scattering!")
    else:
        CloneWorkspace(InputWorkspace=van_corrected,
                       OutputWorkspace=van_corrected)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')
    vanadium_title += "_ms_abs_corrected"
    save_banks(InputWorkspace=van_corrected,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)
    save_banks(InputWorkspace=van_corrected,
               Filename=nexus_filename,
               Title=vanadium_title + "_with_peaks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # TODO subtract self-scattering of vanadium (According to Eq. 7 of Howe,
    # McGreevey, and Howells, JPCM, 1989)

    # Smooth Vanadium (strip peaks plus smooth)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='dSpacing',
                 EMode='Elastic')

    # After StripVanadiumPeaks, the workspace goes from EventWorkspace ->
    # Workspace2D
    StripVanadiumPeaks(InputWorkspace=van_corrected,
                       OutputWorkspace=van_corrected,
                       BackgroundType='Quadratic')
    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')
    vanadium_title += '_peaks_stripped'
    save_banks(InputWorkspace=van_corrected,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='TOF',
                 EMode='Elastic')

    FFTSmooth(InputWorkspace=van_corrected,
              OutputWorkspace=van_corrected,
              Filter="Butterworth",
              Params='20,2',
              IgnoreXBins=True,
              AllSpectra=True)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')

    vanadium_title += '_smoothed'
    save_banks(InputWorkspace=van_corrected,
               Filename=nexus_filename,
               Title=vanadium_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Inelastic correction
    if van_inelastic_corr['Type'] == "Placzek":
        van_scan = van['Runs'][0]
        van_incident_wksp = 'van_incident_wksp'
        van_inelastic_opts = van['InelasticCorrection']
        lambda_binning_fit = van_inelastic_opts['LambdaBinningForFit']
        lambda_binning_calc = van_inelastic_opts['LambdaBinningForCalc']
        print('van_scan:', van_scan)
        GetIncidentSpectrumFromMonitor(Filename=facility_file_format %
                                       (instr, van_scan),
                                       OutputWorkspace=van_incident_wksp)

        fit_type = van['InelasticCorrection']['FitSpectrumWith']
        FitIncidentSpectrum(InputWorkspace=van_incident_wksp,
                            OutputWorkspace=van_incident_wksp,
                            FitSpectrumWith=fit_type,
                            BinningForFit=lambda_binning_fit,
                            BinningForCalc=lambda_binning_calc,
                            PlotDiagnostics=False)

        van_placzek = 'van_placzek'

        SetSample(InputWorkspace=van_incident_wksp,
                  Material={
                      'ChemicalFormula': str(van_material),
                      'SampleMassDensity': str(van_mass_density)
                  })

        CalculatePlaczekSelfScattering(IncidentWorkspace=van_incident_wksp,
                                       ParentWorkspace=van_corrected,
                                       OutputWorkspace=van_placzek,
                                       L1=19.5,
                                       L2=alignAndFocusArgs['L2'],
                                       Polar=alignAndFocusArgs['Polar'])

        ConvertToHistogram(InputWorkspace=van_placzek,
                           OutputWorkspace=van_placzek)

        # Rebin in Q and save the Placzek correction
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

            Rebin(InputWorkspace=wksp,
                  OutputWorkspace=wksp,
                  Params=binning,
                  PreserveEvents=True)

        save_banks(InputWorkspace=van_placzek,
                   Filename=nexus_filename,
                   Title="vanadium_placzek",
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

        # Rebin in Wavelength
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='Wavelength',
                         EMode='Elastic')
            Rebin(InputWorkspace=wksp,
                  OutputWorkspace=wksp,
                  Params=lambda_binning_calc,
                  PreserveEvents=True)

        # Convert back to Q after the wavelength rebin
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

        # Subtract correction in Wavelength
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='Wavelength',
                         EMode='Elastic')
            if not mtd[wksp].isDistribution():
                ConvertToDistribution(wksp)

        Minus(LHSWorkspace=van_corrected,
              RHSWorkspace=van_placzek,
              OutputWorkspace=van_corrected)

        # Save after subtraction
        for wksp in [van_placzek, van_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

        vanadium_title += '_placzek_corrected'
        save_banks(InputWorkspace=van_corrected,
                   Filename=nexus_filename,
                   Title=vanadium_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    ConvertUnits(InputWorkspace=van_corrected,
                 OutputWorkspace=van_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')

    SetUncertainties(InputWorkspace=van_corrected,
                     OutputWorkspace=van_corrected,
                     SetError='zero')

    # STEP 2.1: Normalize by Vanadium

    wksp_list = [sam_wksp, sam_raw, van_corrected]
    for name in wksp_list:
        ConvertUnits(InputWorkspace=name,
                     OutputWorkspace=name,
                     Target='MomentumTransfer',
                     EMode='Elastic',
                     ConvertFromPointData=False)

        Rebin(InputWorkspace=name,
              OutputWorkspace=name,
              Params=binning,
              PreserveEvents=True)

    # Save the sample - back / normalized
    Divide(LHSWorkspace=sam_wksp,
           RHSWorkspace=van_corrected,
           OutputWorkspace=sam_wksp)

    sample_title += "_normalized"
    save_banks(InputWorkspace=sam_wksp,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Save the sample / normalized (i.e. no background subtraction)
    Divide(LHSWorkspace=sam_raw,
           RHSWorkspace=van_corrected,
           OutputWorkspace=sam_raw)

    save_banks(InputWorkspace=sam_raw,
               Filename=nexus_filename,
               Title="sample_normalized",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Output an initial I(Q) for sample
    iq_filename = title + '_initial_iofq_banks.nxs'
    save_banks(InputWorkspace=sam_wksp,
               Filename=iq_filename,
               Title="IQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    wksp_list = [container, container_raw, van_corrected]
    if container_bg is not None:
        wksp_list.append(container_bg)
    if van_bg is not None:
        wksp_list.append(van_bg)

    for name in wksp_list:
        ConvertUnits(InputWorkspace=name,
                     OutputWorkspace=name,
                     Target='MomentumTransfer',
                     EMode='Elastic',
                     ConvertFromPointData=False)

        Rebin(InputWorkspace=name,
              OutputWorkspace=name,
              Params=binning,
              PreserveEvents=True)

    # Save the container - container_background / normalized
    Divide(LHSWorkspace=container,
           RHSWorkspace=van_corrected,
           OutputWorkspace=container)

    container_title += '_normalized'
    save_banks(InputWorkspace=container,
               Filename=nexus_filename,
               Title=container_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Save the container / normalized (i.e. no background subtraction)
    Divide(LHSWorkspace=container_raw,
           RHSWorkspace=van_corrected,
           OutputWorkspace=container_raw)

    save_banks(InputWorkspace=container_raw,
               Filename=nexus_filename,
               Title="container_normalized",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Save the container_background / normalized
    if container_bg is not None:
        Divide(LHSWorkspace=container_bg,
               RHSWorkspace=van_corrected,
               OutputWorkspace=container_bg)

        container_bg_title = "container_back_normalized"
        save_banks(InputWorkspace=container_bg,
                   Filename=nexus_filename,
                   Title=container_bg_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # Save the vanadium_background / normalized
    if van_bg is not None:
        Divide(LHSWorkspace=van_bg,
               RHSWorkspace=van_corrected,
               OutputWorkspace=van_bg)

        vanadium_bg_title += "_normalized"
        save_banks(InputWorkspace=van_bg,
                   Filename=nexus_filename,
                   Title=vanadium_bg_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # STEP 3 & 4: Subtract multiple scattering and apply absorption correction

    ConvertUnits(InputWorkspace=sam_wksp,
                 OutputWorkspace=sam_wksp,
                 Target="Wavelength",
                 EMode="Elastic")

    sam_corrected = 'sam_corrected'
    if sam_abs_corr and sam_ms_corr:
        if sam_abs_corr['Type'] == 'Carpenter' \
                or sam_ms_corr['Type'] == 'Carpenter':
            CarpenterSampleCorrection(
                InputWorkspace=sam_wksp,
                OutputWorkspace=sam_corrected,
                CylinderSampleRadius=sample['Geometry']['Radius'])
        elif sam_abs_corr['Type'] == 'Mayers' \
                or sam_ms_corr['Type'] == 'Mayers':
            if sam_ms_corr['Type'] == 'Mayers':
                MayersSampleCorrection(InputWorkspace=sam_wksp,
                                       OutputWorkspace=sam_corrected,
                                       MultipleScattering=True)
            else:
                MayersSampleCorrection(InputWorkspace=sam_wksp,
                                       OutputWorkspace=sam_corrected,
                                       MultipleScattering=False)
        else:
            print("NO SAMPLE absorption or multiple scattering!")
            CloneWorkspace(InputWorkspace=sam_wksp,
                           OutputWorkspace=sam_corrected)

        ConvertUnits(InputWorkspace=sam_corrected,
                     OutputWorkspace=sam_corrected,
                     Target='MomentumTransfer',
                     EMode='Elastic')

        sample_title += "_ms_abs_corrected"
        save_banks(InputWorkspace=sam_corrected,
                   Filename=nexus_filename,
                   Title=sample_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)
    else:
        CloneWorkspace(InputWorkspace=sam_wksp, OutputWorkspace=sam_corrected)

    # STEP 5: Divide by number of atoms in sample

    mtd[sam_corrected] = (nvan_atoms / natoms) * mtd[sam_corrected]
    ConvertUnits(InputWorkspace=sam_corrected,
                 OutputWorkspace=sam_corrected,
                 Target='MomentumTransfer',
                 EMode='Elastic')

    sample_title += "_norm_by_atoms"
    save_banks(InputWorkspace=sam_corrected,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # STEP 6: Divide by total scattering length squared = total scattering
    # cross-section over 4 * pi
    van_material = mtd[van_corrected].sample().getMaterial()
    sigma_v = van_material.totalScatterXSection()
    prefactor = (sigma_v / (4. * np.pi))
    msg = "Total scattering cross-section of Vanadium:{} sigma_v / 4*pi: {}"
    print(msg.format(sigma_v, prefactor))

    mtd[sam_corrected] = prefactor * mtd[sam_corrected]
    sample_title += '_multiply_by_vanSelfScat'
    save_banks(InputWorkspace=sam_corrected,
               Filename=nexus_filename,
               Title=sample_title,
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # STEP 7: Inelastic correction
    ConvertUnits(InputWorkspace=sam_corrected,
                 OutputWorkspace=sam_corrected,
                 Target='Wavelength',
                 EMode='Elastic')

    if sam_inelastic_corr['Type'] == "Placzek":
        if sam_material is None:
            error = "For Placzek correction, must specifiy a sample material."
            raise Exception(error)
        for sam_scan in sample['Runs']:
            sam_incident_wksp = 'sam_incident_wksp'
            sam_inelastic_opts = sample['InelasticCorrection']
            lambda_binning_fit = sam_inelastic_opts['LambdaBinningForFit']
            lambda_binning_calc = sam_inelastic_opts['LambdaBinningForCalc']
            GetIncidentSpectrumFromMonitor(Filename=facility_file_format %
                                           (instr, sam_scan),
                                           OutputWorkspace=sam_incident_wksp)

            fit_type = sample['InelasticCorrection']['FitSpectrumWith']
            FitIncidentSpectrum(InputWorkspace=sam_incident_wksp,
                                OutputWorkspace=sam_incident_wksp,
                                FitSpectrumWith=fit_type,
                                BinningForFit=lambda_binning_fit,
                                BinningForCalc=lambda_binning_calc)

            sam_placzek = 'sam_placzek'
            SetSample(InputWorkspace=sam_incident_wksp,
                      Material={
                          'ChemicalFormula': str(sam_material),
                          'SampleMassDensity': str(sam_mass_density)
                      })
            CalculatePlaczekSelfScattering(IncidentWorkspace=sam_incident_wksp,
                                           ParentWorkspace=sam_corrected,
                                           OutputWorkspace=sam_placzek,
                                           L1=19.5,
                                           L2=alignAndFocusArgs['L2'],
                                           Polar=alignAndFocusArgs['Polar'])

            ConvertToHistogram(InputWorkspace=sam_placzek,
                               OutputWorkspace=sam_placzek)

        # Save before rebin in Q
        for wksp in [sam_placzek, sam_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

            Rebin(InputWorkspace=wksp,
                  OutputWorkspace=wksp,
                  Params=binning,
                  PreserveEvents=True)

        save_banks(InputWorkspace=sam_placzek,
                   Filename=nexus_filename,
                   Title="sample_placzek",
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

        # Convert back to Q
        for wksp in [sam_placzek, sam_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

        Minus(LHSWorkspace=sam_corrected,
              RHSWorkspace=sam_placzek,
              OutputWorkspace=sam_corrected)

        # Save after subtraction
        for wksp in [sam_placzek, sam_corrected]:
            ConvertUnits(InputWorkspace=wksp,
                         OutputWorkspace=wksp,
                         Target='MomentumTransfer',
                         EMode='Elastic')

        sample_title += '_placzek_corrected'
        save_banks(InputWorkspace=sam_corrected,
                   Filename=nexus_filename,
                   Title=sample_title,
                   OutputDir=OutputDir,
                   GroupingWorkspace=grp_wksp,
                   Binning=binning)

    # STEP 8: Output spectrum

    # TODO Since we already went from Event -> 2D workspace, can't use this
    # anymore
    print('sam:', mtd[sam_corrected].id())
    print('van:', mtd[van_corrected].id())
    if alignAndFocusArgs['PreserveEvents']:
        CompressEvents(InputWorkspace=sam_corrected,
                       OutputWorkspace=sam_corrected)

    # F(Q) bank-by-bank Section
    fq_banks_wksp = "FQ_banks_wksp"
    CloneWorkspace(InputWorkspace=sam_corrected, OutputWorkspace=fq_banks_wksp)
    # TODO: Add the following when implemented - FQ_banks = 'FQ_banks'

    # S(Q) bank-by-bank Section
    material = mtd[sam_corrected].sample().getMaterial()
    if material.name() is None or len(material.name().strip()) == 0:
        raise RuntimeError('Sample material was not set')
    # Laue monotonic diffuse scattering term: <b_tot^2> / <b_coh>^2
    bcoh_avg_sqrd = material.cohScatterLength() * material.cohScatterLength()
    btot_sqrd_avg = material.totalScatterLengthSqrd()
    laue_monotonic_diffuse_scat = btot_sqrd_avg / bcoh_avg_sqrd
    sq_banks_wksp = 'SQ_banks_wksp'
    CloneWorkspace(InputWorkspace=sam_corrected, OutputWorkspace=sq_banks_wksp)

    # TODO: Add the following when implemented
    '''
    SQ_banks = (1. / bcoh_avg_sqrd) * \
        mtd[sq_banks_wksp] - laue_monotonic_diffuse_scat + 1.
    '''

    # Save S(Q) and F(Q) to diagnostics NeXus file
    save_banks(InputWorkspace=fq_banks_wksp,
               Filename=nexus_filename,
               Title="FQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    save_banks(InputWorkspace=sq_banks_wksp,
               Filename=nexus_filename,
               Title="SQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Output a main S(Q) and F(Q) file
    fq_filename = title + '_fofq_banks_corrected.nxs'
    save_banks(InputWorkspace=fq_banks_wksp,
               Filename=fq_filename,
               Title="FQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    sq_filename = title + '_sofq_banks_corrected.nxs'
    save_banks(InputWorkspace=sq_banks_wksp,
               Filename=sq_filename,
               Title="SQ_banks",
               OutputDir=OutputDir,
               GroupingWorkspace=grp_wksp,
               Binning=binning)

    # Print log information
    print("<b>^2:", bcoh_avg_sqrd)
    print("<b^2>:", btot_sqrd_avg)
    print("Laue term:", laue_monotonic_diffuse_scat)
    print("sample total xsection:",
          mtd[sam_corrected].sample().getMaterial().totalScatterXSection())
    print("vanadium total xsection:",
          mtd[van_corrected].sample().getMaterial().totalScatterXSection())

    # Output Bragg Diffraction
    ConvertUnits(InputWorkspace=sam_corrected,
                 OutputWorkspace=sam_corrected,
                 Target="TOF",
                 EMode="Elastic")

    ConvertToHistogram(InputWorkspace=sam_corrected,
                       OutputWorkspace=sam_corrected)

    xmin, xmax = get_each_spectra_xmin_xmax(mtd[sam_corrected])

    CropWorkspaceRagged(InputWorkspace=sam_corrected,
                        OutputWorkspace=sam_corrected,
                        Xmin=xmin,
                        Xmax=xmax)

    xmin_rebin = min(xmin)
    xmax_rebin = max(xmax)
    tof_binning = "{xmin},-0.01,{xmax}".format(xmin=xmin_rebin,
                                               xmax=xmax_rebin)

    Rebin(InputWorkspace=sam_corrected,
          OutputWorkspace=sam_corrected,
          Params=tof_binning)

    SaveGSS(InputWorkspace=sam_corrected,
            Filename=os.path.join(os.path.abspath(OutputDir), title + ".gsa"),
            SplitFiles=False,
            Append=False,
            MultiplyByBinWidth=True,
            Format="SLOG",
            ExtendedHeader=True)

    return mtd[sam_corrected]
Example #10
def process_json(json_filename):
    """This will read a json file, process the data and save the calibration.

    Only ``Calibrant`` and ``Groups`` are required.

    An example input showing every possible option is:

    .. code-block:: JSON

      {
        "Calibrant": "12345",
        "Groups": "/path/to/groups.xml",
        "Mask": "/path/to/mask.xml",
        "Instrument": "NOM",
        "Date" : "2019_09_04",
        "SampleEnvironment": "shifter",
        "PreviousCalibration": "/path/to/cal.h5",
        "CalDirectory": "/path/to/output_directory",
        "CrossCorrelate": {"Step": 0.001,
                           "DReference: 1.5,
                           "Xmin": 1.0,
                           "Xmax": 3.0,
                           "MaxDSpaceShift": 0.25},
        "PDCalibration": {"PeakPositions": [1, 2, 3],
                          "TofBinning": (300,0.001,16666),
                          "PeakFunction": 'Gaussian',
                          "PeakWindow": 0.1,
                          "PeakWidthPercent": 0.001}
      }
    """
    with open(json_filename) as json_file:
        args = json.load(json_file)

    calibrant_file = args.get('CalibrantFile', None)
    if calibrant_file is None:
        calibrant = args['Calibrant']
    groups = args['Groups']
    out_groups_by = args.get('OutputGroupsBy', 'Group')
    sample_env = args.get('SampleEnvironment', 'UnknownSampleEnvironment')
    mask = args.get('Mask')
    instrument = args.get('Instrument', 'NOM')
    cc_kwargs = args.get('CrossCorrelate', {})
    pdcal_kwargs = args.get('PDCalibration', {})
    previous_calibration = args.get('PreviousCalibration')

    date = str(args.get('Date', datetime.datetime.now().strftime('%Y_%m_%d')))
    caldirectory = str(args.get('CalDirectory', os.path.abspath('.')))

    if calibrant_file is not None:
        ws = Load(calibrant_file)
        calibrant = ws.getRun().getProperty('run_number').value
    else:
        filename = f'{instrument}_{calibrant}'
        ws = Load(filename)

    calfilename = f'{caldirectory}/{instrument}_{calibrant}_{date}_{sample_env}.h5'
    logger.notice(f'going to create calibration file: {calfilename}')

    groups = LoadDetectorsGroupingFile(groups, InputWorkspace=ws)

    if mask:
        mask = LoadMask(instrument, mask)
        MaskDetectors(ws, MaskedWorkspace=mask)

    if previous_calibration:
        previous_calibration = LoadDiffCal(previous_calibration,
                                           MakeGroupingWorkspace=False,
                                           MakeMaskWorkspace=False)

    diffcal = do_group_calibration(ws,
                                   groups,
                                   previous_calibration,
                                   cc_kwargs=cc_kwargs,
                                   pdcal_kwargs=pdcal_kwargs)
    mask = mtd['group_calibration_pd_diffcal_mask']

    CreateGroupingWorkspace(InputWorkspace=ws,
                            GroupDetectorsBy=out_groups_by,
                            OutputWorkspace='out_groups')
    SaveDiffCal(CalibrationWorkspace=diffcal,
                MaskWorkspace=mask,
                GroupingWorkspace=mtd['out_groups'],
                Filename=calfilename)
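
A hedged end-to-end usage sketch, assuming an input file shaped like the docstring example above (the path is illustrative):

    # run the full calibration pipeline described by the JSON file
    process_json('/path/to/calibration_inputs.json')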
Example #11
    def PyExec(self):
        temporary_workspaces = []
        self.temp_ws = temp_workspace_generator(
            temporary_workspaces)  # name generator for temporary workspaces

        prefix_output = self.getProperty('OutputWorkspacesPrefix').value
        progress_percent_start, progress_percent_end, reports_count = 0.0, 0.01, 5
        progress = Progress(self, progress_percent_start, progress_percent_end,
                            reports_count)
        # name of the input workspace
        input_workspace = self.getPropertyValue('InputWorkspace')
        # names of workspaces that diagnose the orientation of the banks
        adjustment_diagnostics = list()

        # Create a grouping workspace whereby we group detectors by banks
        grouping_workspace = self.temp_ws()  # a temporary name
        CreateGroupingWorkspace(InputWorkspace=input_workspace,
                                OutputWorkspace=grouping_workspace,
                                GroupDetectorsBy='bank')

        # Remove delayed emission time from the moderator
        kwargs = dict(InputWorkspace=input_workspace,
                      Emode='Elastic',
                      OutputWorkspace=input_workspace)
        self.run_algorithm('ModeratorTzero',
                           0,
                           0.02,
                           soft_crash=True,
                           **kwargs)
        progress.report('ModeratorTzero has been applied')

        # Find dSpacing to TOF conversion DIFC parameter
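        # (Mantid relates the two via TOF = DIFC*d + DIFA*d**2 + TZERO; only the
        # linear DIFC term is refined here, per CalibrationParameters='DIFC')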
        difc_table = f'{prefix_output}PDCalibration_difc'
        diagnostics_workspaces = prefix_output + 'PDCalibration_diagnostics'  # group workspace
        kwargs = dict(InputWorkspace=input_workspace,
                      TofBinning=self.getProperty('TofBinning').value,
                      PeakFunction=self.getProperty('PeakFunction').value,
                      PeakPositions=self.getProperty('PeakPositions').value,
                      CalibrationParameters='DIFC',
                      OutputCalibrationTable=difc_table,
                      DiagnosticWorkspaces=diagnostics_workspaces)
        PDCalibration(**kwargs)
        progress.report('PDCalibration has been applied')

        # Create one spectra in d-spacing for each bank using the original instrument geometry
        self.fitted_in_dspacing(
            fitted_in_tof=prefix_output + 'PDCalibration_diagnostics_fitted',
            workspace_with_instrument=input_workspace,
            output_workspace=prefix_output + 'PDCalibration_peaks_original',
            grouping_workspace=grouping_workspace)
        adjustment_diagnostics.append(prefix_output +
                                      'PDCalibration_peaks_original')

        # Find the peak centers in TOF units, for the peaks found at each pixel
        peak_centers_in_tof = prefix_output + 'PDCalibration_diagnostics_tof'
        self.centers_in_tof(
            prefix_output + 'PDCalibration_diagnostics_dspacing', difc_table,
            peak_centers_in_tof)
        mtd[diagnostics_workspaces].add(peak_centers_in_tof)

        # Find the Histogram of peak deviations (in d-spacing units)
        # for each bank, using the original instrument geometry
        self.histogram_peak_deviations(
            prefix_output + 'PDCalibration_diagnostics_tof', input_workspace,
            prefix_output + 'peak_deviations_original', grouping_workspace)
        adjustment_diagnostics.append(prefix_output +
                                      'peak_deviations_original')

        # repeat with percent peak deviations for each bank, using the adjusted instrument geometry
        self.histogram_peak_deviations(
            prefix_output + 'PDCalibration_diagnostics_tof',
            input_workspace,
            prefix_output + 'percent_peak_deviations_original',
            grouping_workspace,
            deviation_params=[-10, 0.01, 10],
            percent_deviations=True)
        adjustment_diagnostics.append(prefix_output +
                                      'percent_peak_deviations_original')

        # store the DIFC and DIFC_mask workspace created by PDCalibration in the diagnostics workspace
        mtd[diagnostics_workspaces].add(difc_table)
        mtd[diagnostics_workspaces].add(difc_table + '_mask')

        adjustments_table_name = f'{prefix_output}adjustments'
        # Adjust the position of the source along the beam (Z) axis
        # The instrument in `input_workspace` is adjusted in-place
        if self.getProperty('AdjustSource').value is True:
            dz = self.getProperty('SourceMaxTranslation').value
            kwargs = dict(
                InputWorkspace=input_workspace,
                OutputWorkspace=input_workspace,
                PeakCentersTofTable=peak_centers_in_tof,
                PeakPositions=self.getProperty('PeakPositions').value,
                MaskWorkspace=f'{difc_table}_mask',
                FitSourcePosition=True,
                FitSamplePosition=False,
                Zposition=True,
                MinZPosition=-dz,
                MaxZPosition=dz,
                Minimizer='L-BFGS-B')
            self.run_algorithm('AlignComponents', 0.1, 0.2, **kwargs)
        else:
            # Impose the fixed position of the source and save into the adjustments table
            self._fixed_source_set_and_table(adjustments_table_name)
        # Translate and rotate each bank, only after the source has been adjusted
        # The instrument in `input_workspace` is adjusted in-place

        # Translation options to AlignComponents
        dt = self.getProperty('ComponentMaxTranslation').value  # maximum translation along either axis
        move_y = not self.getProperty('FixY').value
        kwargs_transl = dict(Xposition=True,
                             MinXPosition=-dt,
                             MaxXPosition=dt,
                             Yposition=move_y,
                             MinYPosition=-dt,
                             MaxYPosition=dt,
                             Zposition=True,
                             MinZPosition=-dt,
                             MaxZPosition=dt)

        # Rotation options for AlignComponents
        dr = self.getProperty('ComponentMaxRotation').value  # maximum rotation around either axis
        rot_z = not self.getProperty('FixYaw').value
        kwargs_rotat = dict(AlphaRotation=True,
                            MinAlphaRotation=-dr,
                            MaxAlphaRotation=dr,
                            BetaRotation=True,
                            MinBetaRotation=-dr,
                            MaxBetaRotation=dr,
                            GammaRotation=rot_z,
                            MinGammaRotation=-dr,
                            MaxGammaRotation=dr,
                            EulerConvention='YXZ')

        # Remaining options for AlignComponents
        displacements_table_name = f'{prefix_output}displacements'
        kwargs = dict(InputWorkspace=input_workspace,
                      OutputWorkspace=input_workspace,
                      PeakCentersTofTable=peak_centers_in_tof,
                      PeakPositions=self.getProperty('PeakPositions').value,
                      MaskWorkspace=f'{difc_table}_mask',
                      AdjustmentsTable=adjustments_table_name + '_banks',
                      DisplacementsTable=displacements_table_name,
                      FitSourcePosition=False,
                      FitSamplePosition=False,
                      ComponentList=self.getProperty('ComponentList').value,
                      Minimizer=self.getProperty('Minimizer').value,
                      MaxIterations=self.getProperty('MaxIterations').value)

        self.run_algorithm('AlignComponents', 0.2, 0.97, **kwargs,
                           **kwargs_transl, **kwargs_rotat)
        progress.report('AlignComponents has been applied')

        # AlignComponents produces two unwanted workspaces
        temporary_workspaces.append('calWS')

        # Append the banks table to the source table, then delete the banks table.
        self._append_second_to_first(adjustments_table_name,
                                     adjustments_table_name + '_banks')
        # Create one spectra in d-spacing for each bank using the adjusted instrument geometry.
        # The spectra can be compared to those of prefix_output + 'PDCalibration_peaks_original'
        self.fitted_in_dspacing(
            fitted_in_tof=prefix_output + 'PDCalibration_diagnostics_fitted',
            workspace_with_instrument=input_workspace,
            output_workspace=prefix_output + 'PDCalibration_peaks_adjusted',
            grouping_workspace=grouping_workspace)
        adjustment_diagnostics.append(prefix_output +
                                      'PDCalibration_peaks_adjusted')

        # Find the Histogram of peak deviations (in d-spacing units)
        # for each bank, using the adjusted instrument geometry
        self.histogram_peak_deviations(
            prefix_output + 'PDCalibration_diagnostics_tof', input_workspace,
            prefix_output + 'peak_deviations_adjusted', grouping_workspace)
        adjustment_diagnostics.append(prefix_output +
                                      'peak_deviations_adjusted')

        # repeat with percent peak deviations for each bank, using the adjusted instrument geometry
        self.histogram_peak_deviations(
            prefix_output + 'PDCalibration_diagnostics_tof',
            input_workspace,
            prefix_output + 'percent_peak_deviations_adjusted',
            grouping_workspace,
            deviation_params=[-10, 0.01, 10],
            percent_deviations=True)
        adjustment_diagnostics.append(prefix_output +
                                      'percent_peak_deviations_adjusted')

        # summarize the changes observed in the histogram of percent peak deviations
        self.peak_deviations_summarize(
            prefix_output + 'percent_peak_deviations_original',
            prefix_output + 'percent_peak_deviations_adjusted',
            prefix_output + 'percent_peak_deviations_summary')
        adjustment_diagnostics.append(prefix_output +
                                      'percent_peak_deviations_summary')

        # Create a WorkspaceGroup with the orientation diagnostics
        GroupWorkspaces(InputWorkspaces=adjustment_diagnostics,
                        OutputWorkspace=prefix_output +
                        'bank_adjustment_diagnostics')

        # clean up at the end (only happens if the algorithm completes successfully)
        for name in temporary_workspaces:
            if AnalysisDataService.doesExist(name):
                DeleteWorkspace(name)
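
Note: temp_workspace_generator is imported from elsewhere in the package and not shown in this listing. A plausible minimal sketch, assuming all it must do is mint unique hidden workspace names and record them for the clean-up loop at the end of PyExec:

import uuid

def temp_workspace_generator(temporary_workspaces):
    """Hypothetical sketch of the helper used above: returns a callable that
    mints unique workspace names (the leading '__' hides them in Mantid's
    workspace list) and appends each name to `temporary_workspaces` so the
    algorithm can delete them once it completes successfully."""
    def temp_ws():
        name = '__' + uuid.uuid4().hex
        temporary_workspaces.append(name)
        return name
    return temp_ws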
Example #12
    def PyExec(self):
        runs = self.getProperty("Filename").value

        if not runs:
            ipts = self.getProperty("IPTS").value
            runs = ['/HFIR/HB2C/IPTS-{}/nexus/HB2C_{}.nxs.h5'.format(ipts, run) for run in self.getProperty("RunNumbers").value]

        grouping = self.getProperty("Grouping").value
        if grouping == 'None':
            grouping = 1
        else:
            grouping = 2 if grouping == '2x2' else 4

        x_dim = 480*8 // grouping
        y_dim = 512 // grouping
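        # HB2C (WAND^2) detector: 8 banks of 480 x 512 pixels, i.e. 3840 x 512 in
        # total; optional 2x2 or 4x4 software binning shrinks each dimension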

        number_of_runs = len(runs)

        data_array = np.empty((number_of_runs, x_dim, y_dim), dtype=np.float64)

        s1_array = []
        duration_array = []
        run_number_array = []
        monitor_count_array = []

        progress = Progress(self, 0.0, 1.0, number_of_runs+3)

        for n, run in enumerate(runs):
            progress.report('Loading: '+run)
            with h5py.File(run, 'r') as f:
                bc = np.zeros((512*480*8),dtype=np.int64)
                for b in range(8):
                    bc += np.bincount(f['/entry/bank'+str(b+1)+'_events/event_id'][()],minlength=512*480*8)
                bc = bc.reshape((480*8, 512))
                if grouping == 2:
                    bc = bc[::2,::2]+bc[1::2,::2]+bc[::2,1::2]+bc[1::2,1::2]
                elif grouping == 4:
                    bc = (bc[::4,::4]    + bc[1::4,::4]  + bc[2::4,::4]  + bc[3::4,::4]
                          + bc[::4,1::4] + bc[1::4,1::4] + bc[2::4,1::4] + bc[3::4,1::4]
                          + bc[::4,2::4] + bc[1::4,2::4] + bc[2::4,2::4] + bc[3::4,2::4]
                          + bc[::4,3::4] + bc[1::4,3::4] + bc[2::4,3::4] + bc[3::4,3::4])
                data_array[n] = bc
                s1_array.append(f['/entry/DASlogs/HB2C:Mot:s1.RBV/average_value'][()][0])
                duration_array.append(float(f['/entry/duration'][()][0]))
                run_number_array.append(float(f['/entry/run_number'][()][0]))
                monitor_count_array.append(float(f['/entry/monitor1/total_counts'][()][0]))

        progress.report('Creating MDHistoWorkspace')
        createWS_alg = self.createChildAlgorithm("CreateMDHistoWorkspace", enableLogging=False)
        createWS_alg.setProperty("SignalInput", data_array)
        createWS_alg.setProperty("ErrorInput", np.sqrt(data_array))
        createWS_alg.setProperty("Dimensionality", 3)
        createWS_alg.setProperty("Extents", '0.5,{},0.5,{},0.5,{}'.format(y_dim+0.5, x_dim+0.5, number_of_runs+0.5))
        createWS_alg.setProperty("NumberOfBins", '{},{},{}'.format(y_dim,x_dim,number_of_runs))
        createWS_alg.setProperty("Names", 'y,x,scanIndex')
        createWS_alg.setProperty("Units", 'bin,bin,number')
        createWS_alg.execute()
        outWS = createWS_alg.getProperty("OutputWorkspace").value

        progress.report('Getting IDF')
        # Get the instrument and some logs from the first file; assume the rest are the same
        _tmp_ws = LoadEventNexus(runs[0], MetaDataOnly=True, EnableLogging=False)
        # The following logs should be the same for all runs
        RemoveLogs(_tmp_ws,
                   KeepLogs='HB2C:Mot:detz,HB2C:Mot:detz.RBV,HB2C:Mot:s2,HB2C:Mot:s2.RBV,'
                   'HB2C:Mot:sgl,HB2C:Mot:sgl.RBV,HB2C:Mot:sgu,HB2C:Mot:sgu.RBV,'
                   'run_title,start_time,experiment_identifier,HB2C:CS:CrystalAlign:UBMatrix',
                   EnableLogging=False)

        try:
            ub = np.array(re.findall(r'-?\d+\.*\d*', _tmp_ws.run().getProperty('HB2C:CS:CrystalAlign:UBMatrix').value[0]),
                          dtype=float).reshape(3,3)
            sgl = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgl.RBV').value[0]) # 'HB2C:Mot:sgl.RBV,1,0,0,-1'
            sgu = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgu.RBV').value[0]) # 'HB2C:Mot:sgu.RBV,0,0,1,-1'
            sgl_a = np.array([[           1,            0,           0],
                              [           0,  np.cos(sgl), np.sin(sgl)],
                              [           0, -np.sin(sgl), np.cos(sgl)]])
            sgu_a = np.array([[ np.cos(sgu),  np.sin(sgu),           0],
                              [-np.sin(sgu),  np.cos(sgu),           0],
                              [           0,            0,           1]])
            UB = sgl_a.dot(sgu_a).dot(ub) # Apply the Goniometer tilts to the UB matrix
            SetUB(_tmp_ws, UB=UB, EnableLogging=False)
        except (RuntimeError, ValueError):
            SetUB(_tmp_ws, EnableLogging=False)

        if grouping > 1:
            _tmp_group, _, _ = CreateGroupingWorkspace(InputWorkspace=_tmp_ws, EnableLogging=False)

            group_number = 0
            for x in range(0,480*8,grouping):
                for y in range(0,512,grouping):
                    group_number += 1
                    for j in range(grouping):
                        for i in range(grouping):
                            _tmp_group.dataY(y+i+(x+j)*512)[0] = group_number

            _tmp_ws = GroupDetectors(InputWorkspace=_tmp_ws, CopyGroupingFromWorkspace=_tmp_group, EnableLogging=False)
            DeleteWorkspace(_tmp_group, EnableLogging=False)

        progress.report('Adding logs')

        # Hack: ConvertToMD is needed so that a deep copy of the ExperimentInfo can happen
        # outWS.addExperimentInfo(_tmp_ws) # This doesn't work but should; when you delete `ws`, `outWS` also loses its ExperimentInfo
        _tmp_ws = Rebin(_tmp_ws, '0,1,2', EnableLogging=False)
        _tmp_ws = ConvertToMD(_tmp_ws, dEAnalysisMode='Elastic', EnableLogging=False, PreprocDetectorsWS='__PreprocessedDetectorsWS')

        preprocWS = mtd['__PreprocessedDetectorsWS']
        twotheta = preprocWS.column(2)
        azimuthal = preprocWS.column(3)

        outWS.copyExperimentInfos(_tmp_ws)
        DeleteWorkspace(_tmp_ws, EnableLogging=False)
        DeleteWorkspace('__PreprocessedDetectorsWS', EnableLogging=False)
        # end Hack

        outWS.getExperimentInfo(0).run().addProperty('s1', s1_array, True)
        outWS.getExperimentInfo(0).run().getProperty('s1').units = 'deg'
        outWS.getExperimentInfo(0).run().addProperty('duration', duration_array, True)
        outWS.getExperimentInfo(0).run().getProperty('duration').units = 'second'
        outWS.getExperimentInfo(0).run().addProperty('run_number', run_number_array, True)
        outWS.getExperimentInfo(0).run().addProperty('monitor_count', monitor_count_array, True)
        outWS.getExperimentInfo(0).run().addProperty('twotheta', twotheta, True)
        outWS.getExperimentInfo(0).run().addProperty('azimuthal', azimuthal, True)

        self.setProperty("OutputWorkspace", outWS)
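
Note: the 2x2 and 4x4 branches above sum strided slices, which is equivalent to summing each grouping x grouping pixel block. A compact reshape-based equivalent, verifiable with pure numpy (no Mantid required):

import numpy as np

def bin_pixels(bc, grouping):
    # Sum counts over each grouping x grouping block; same result as the
    # explicit strided-slice sums in the examples above.
    rows, cols = bc.shape
    return bc.reshape(rows // grouping, grouping,
                      cols // grouping, grouping).sum(axis=(1, 3))

bc = np.arange(480 * 8 * 512, dtype=np.int64).reshape(480 * 8, 512)
assert np.array_equal(
    bin_pixels(bc, 2),
    bc[::2, ::2] + bc[1::2, ::2] + bc[::2, 1::2] + bc[1::2, 1::2])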
Example #13
    def load_and_group(self, runs: List[str]) -> IMDHistoWorkspace:
        """
        Load the data with given grouping
        """
        # grouping config
        grouping = self.getProperty("Grouping").value
        if grouping == 'None':
            grouping = 1
        else:
            grouping = 2 if grouping == '2x2' else 4
        number_of_runs = len(runs)

        x_dim = 480 * 8 // grouping
        y_dim = 512 // grouping

        data_array = np.empty((number_of_runs, x_dim, y_dim), dtype=np.float64)

        s1_array = []
        duration_array = []
        run_number_array = []
        monitor_count_array = []

        progress = Progress(self, 0.0, 1.0, number_of_runs + 3)

        for n, run in enumerate(runs):
            progress.report('Loading: ' + run)
            with h5py.File(run, 'r') as f:
                bc = np.zeros((512 * 480 * 8), dtype=np.int64)
                for b in range(8):
                    bc += np.bincount(f['/entry/bank' + str(b + 1) +
                                        '_events/event_id'][()],
                                      minlength=512 * 480 * 8)
                bc = bc.reshape((480 * 8, 512))
                if grouping == 2:
                    bc = bc[::2, ::2] + bc[1::2, ::2] + bc[::2, 1::2] + bc[1::2, 1::2]
                elif grouping == 4:
                    bc = (bc[::4, ::4]   + bc[1::4, ::4]  + bc[2::4, ::4]  + bc[3::4, ::4]
                          + bc[::4, 1::4] + bc[1::4, 1::4] + bc[2::4, 1::4] + bc[3::4, 1::4]
                          + bc[::4, 2::4] + bc[1::4, 2::4] + bc[2::4, 2::4] + bc[3::4, 2::4]
                          + bc[::4, 3::4] + bc[1::4, 3::4] + bc[2::4, 3::4] + bc[3::4, 3::4])
                data_array[n] = bc
                s1_array.append(f['/entry/DASlogs/HB2C:Mot:s1.RBV/average_value'][()][0])
                duration_array.append(float(f['/entry/duration'][()][0]))
                run_number_array.append(float(f['/entry/run_number'][()][0]))
                monitor_count_array.append(float(f['/entry/monitor1/total_counts'][()][0]))

        progress.report('Creating MDHistoWorkspace')
        createWS_alg = self.createChildAlgorithm("CreateMDHistoWorkspace",
                                                 enableLogging=False)
        createWS_alg.setProperty("SignalInput", data_array)
        createWS_alg.setProperty("ErrorInput", np.sqrt(data_array))
        createWS_alg.setProperty("Dimensionality", 3)
        createWS_alg.setProperty(
            "Extents", '0.5,{},0.5,{},0.5,{}'.format(y_dim + 0.5, x_dim + 0.5,
                                                     number_of_runs + 0.5))
        createWS_alg.setProperty(
            "NumberOfBins", '{},{},{}'.format(y_dim, x_dim, number_of_runs))
        createWS_alg.setProperty("Names", 'y,x,scanIndex')
        createWS_alg.setProperty("Units", 'bin,bin,number')
        createWS_alg.execute()
        outWS = createWS_alg.getProperty("OutputWorkspace").value

        progress.report('Getting IDF')
        # Get the instrument and some logs from the first file; assume the rest are the same
        _tmp_ws = LoadEventNexus(runs[0],
                                 MetaDataOnly=True,
                                 EnableLogging=False)
        # The following logs should be the same for all runs
        RemoveLogs(_tmp_ws,
                   KeepLogs='HB2C:Mot:detz,HB2C:Mot:detz.RBV,HB2C:Mot:s2,HB2C:Mot:s2.RBV,'
                            'HB2C:Mot:sgl,HB2C:Mot:sgl.RBV,HB2C:Mot:sgu,HB2C:Mot:sgu.RBV,'
                            'run_title,start_time,experiment_identifier,HB2C:CS:CrystalAlign:UBMatrix',
                   EnableLogging=False)

        time_ns_array = (_tmp_ws.run().startTime().totalNanoseconds()
                         + np.append(0, np.cumsum(duration_array) * 1e9)[:-1])

        try:
            ub = np.array(re.findall(r'-?\d+\.*\d*',
                                     _tmp_ws.run().getProperty('HB2C:CS:CrystalAlign:UBMatrix').value[0]),
                          dtype=float).reshape(3, 3)
            sgl = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgl.RBV').value[0])  # 'HB2C:Mot:sgl.RBV,1,0,0,-1'
            sgu = np.deg2rad(_tmp_ws.run().getProperty('HB2C:Mot:sgu.RBV').value[0])  # 'HB2C:Mot:sgu.RBV,0,0,1,-1'
            sgl_a = np.array([[1, 0, 0],
                              [0, np.cos(sgl), np.sin(sgl)],
                              [0, -np.sin(sgl), np.cos(sgl)]])
            sgu_a = np.array([[np.cos(sgu), np.sin(sgu), 0],
                              [-np.sin(sgu), np.cos(sgu), 0],
                              [0, 0, 1]])
            UB = sgl_a.dot(sgu_a).dot(ub)  # Apply the goniometer tilts to the UB matrix
            SetUB(_tmp_ws, UB=UB, EnableLogging=False)
        except (RuntimeError, ValueError):
            SetUB(_tmp_ws, EnableLogging=False)

        if grouping > 1:
            _tmp_group, _, _ = CreateGroupingWorkspace(InputWorkspace=_tmp_ws,
                                                       EnableLogging=False)

            group_number = 0
            for x in range(0, 480 * 8, grouping):
                for y in range(0, 512, grouping):
                    group_number += 1
                    for j in range(grouping):
                        for i in range(grouping):
                            _tmp_group.dataY(y + i + (x + j) * 512)[0] = group_number

            _tmp_ws = GroupDetectors(InputWorkspace=_tmp_ws,
                                     CopyGroupingFromWorkspace=_tmp_group,
                                     EnableLogging=False)
            DeleteWorkspace(_tmp_group, EnableLogging=False)

        progress.report('Adding logs')

        # Hack: ConvertToMD is needed so that a deep copy of the ExperimentInfo can happen
        # outWS.addExperimentInfo(_tmp_ws) # This doesn't work but should; when you delete `ws`, `outWS` also loses its ExperimentInfo
        _tmp_ws = Rebin(_tmp_ws, '0,1,2', EnableLogging=False)
        _tmp_ws = ConvertToMD(_tmp_ws,
                              dEAnalysisMode='Elastic',
                              EnableLogging=False,
                              PreprocDetectorsWS='__PreprocessedDetectorsWS')

        preprocWS = mtd['__PreprocessedDetectorsWS']
        twotheta = preprocWS.column(2)
        azimuthal = preprocWS.column(3)

        outWS.copyExperimentInfos(_tmp_ws)
        DeleteWorkspace(_tmp_ws, EnableLogging=False)
        DeleteWorkspace('__PreprocessedDetectorsWS', EnableLogging=False)
        # end Hack

        add_time_series_property('s1',
                                 outWS.getExperimentInfo(0).run(),
                                 time_ns_array, s1_array)
        outWS.getExperimentInfo(0).run().getProperty('s1').units = 'deg'
        add_time_series_property('duration',
                                 outWS.getExperimentInfo(0).run(),
                                 time_ns_array, duration_array)
        outWS.getExperimentInfo(0).run().getProperty('duration').units = 'second'
        outWS.getExperimentInfo(0).run().addProperty('run_number',
                                                     run_number_array, True)
        add_time_series_property('monitor_count',
                                 outWS.getExperimentInfo(0).run(),
                                 time_ns_array, monitor_count_array)
        outWS.getExperimentInfo(0).run().addProperty('twotheta', twotheta,
                                                     True)
        outWS.getExperimentInfo(0).run().addProperty('azimuthal', azimuthal,
                                                     True)

        setGoniometer_alg = self.createChildAlgorithm("SetGoniometer",
                                                      enableLogging=False)
        setGoniometer_alg.setProperty("Workspace", outWS)
        setGoniometer_alg.setProperty("Axis0", 's1,0,1,0,1')
        setGoniometer_alg.setProperty("Average", False)
        setGoniometer_alg.execute()

        return outWS
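
Note: add_time_series_property, used above to attach s1, duration, and monitor_count as time-series logs, is imported from elsewhere in the package and not shown here. A rough sketch of the property such a helper must assemble; FloatTimeSeriesProperty and DateAndTime are real mantid.kernel classes, but the body and the final attachment to the run are assumptions, not the actual implementation:

from mantid.kernel import DateAndTime, FloatTimeSeriesProperty

def build_time_series_log(name, times_ns, values):
    # Hypothetical sketch: build a float time-series log whose entries are
    # stamped with absolute times in total nanoseconds, the same unit used
    # for time_ns_array above.
    prop = FloatTimeSeriesProperty(name)
    for t_ns, value in zip(times_ns, values):
        prop.addValue(DateAndTime(int(t_ns)), float(value))
    return prop  # the real helper presumably also attaches this log to the run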