Example #1
    def set_output_workspaces(self, reduction_mode_vs_output_workspaces,
                              reduction_mode_vs_workspace_names):
        """
        Sets the output workspaces which can be HAB, LAB or Merged.

        At this step we also provide a workspace name to the sample logs which can be used later on for saving
        :param reduction_mode_vs_output_workspaces:  map from reduction mode to output workspace
        :param reduction_mode_vs_workspace_names: an unused dict. Required for version 2 compatibility
        """
        # Note that this breaks the flexibility that we have established with the reduction mode. We have not hardcoded
        # HAB or LAB anywhere which means that in the future there could be other detectors of relevance. Here we
        # reference HAB and LAB directly since we currently don't want to rely on dynamic properties. See also in PyInit

        merged, lab, hab = WorkspaceGroup(), WorkspaceGroup(), WorkspaceGroup()

        for reduction_mode, output_workspace_list in reduction_mode_vs_output_workspaces.items():
            for output_workspace in output_workspace_list:
                if reduction_mode is ReductionMode.MERGED:
                    merged.addWorkspace(output_workspace)
                elif reduction_mode is ReductionMode.LAB:
                    lab.addWorkspace(output_workspace)
                elif reduction_mode is ReductionMode.HAB:
                    hab.addWorkspace(output_workspace)
                else:
                    raise RuntimeError(
                        "SANSSingleReduction: Cannot set the output workspace. The selected reduction "
                        "mode {0} is unknown.".format(reduction_mode))

        self._set_prop_if_group_has_data("OutputWorkspaceLAB", lab)
        self._set_prop_if_group_has_data("OutputWorkspaceHAB", hab)
        self._set_prop_if_group_has_data("OutputWorkspaceMerged", merged)
Example #2
    def PyExec(self):
        workspace = get_input_workspace_as_copy_if_not_same_as_output_workspace(self)
        wavelength_pairs: List[Tuple[float, float]] = json.loads(
            self.getProperty(self.WAV_PAIRS).value)
        progress = Progress(self, start=0.0, end=1.0,
                            nreports=1 + len(wavelength_pairs))  # 1 - convert units

        # Convert the units into wavelength
        progress.report("Converting workspace to wavelength units.")
        workspace = self._convert_units_to_wavelength(workspace)

        # Get the rebin option
        output_group = WorkspaceGroup()
        for pair in wavelength_pairs:
            rebin_string = self._get_rebin_string(workspace, *pair)
            progress.report(f"Converting wavelength range: {rebin_string}")

            # Perform the rebin
            rebin_options = self._get_rebin_params(rebin_string, workspace)
            out_ws = self._perform_rebin(rebin_options)

            append_to_sans_file_tag(out_ws, "_toWavelength")
            output_group.addWorkspace(out_ws)
        self.setProperty("OutputWorkspace", output_group)
Example #3
 def _create_sample_ws_group(ws_name):
     # This has to be done as two steps or the simple API can't figure out the output name
     ws_group = WorkspaceGroup()
     ws_group.addWorkspace(CreateSampleWorkspace(OutputWorkspace=ws_name, Function='Flat background',
                                                 NumBanks=1, BankPixelWidth=1, NumEvents=1,
                                                 XMin=1, XMax=14, BinWidth=2))
     return ws_group
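When the member workspaces are already registered in the ADS, the same effect can be had in one call with GroupWorkspaces (used in Example #27 below); building a WorkspaceGroup by hand, as here, is the way to group without relying on the ADS. A brief sketch of the one-call alternative, assuming the simple API is available:

    from mantid.simpleapi import CreateSampleWorkspace, GroupWorkspaces

    ws = CreateSampleWorkspace(OutputWorkspace='sample_ws')
    # GroupWorkspaces registers the group on the ADS under the output name.
    group = GroupWorkspaces([ws], OutputWorkspace='sample_group')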
Example #4
    def set_output_workspaces(self,
                              workflow_outputs: SANSWorkflowAlgorithmOutputs):
        """
        Sets the output workspaces which can be HAB, LAB or Merged.

        At this step we also provide a workspace name to the sample logs which can be used later on for saving
        :param workflow_outputs:  collection of wavelength sliced and reduced workspaces
        """
        # Note that this breaks the flexibility that we have established with the reduction mode. We have not hardcoded
        # HAB or LAB anywhere which means that in the future there could be other detectors of relevance. Here we
        # reference HAB and LAB directly since we currently don't want to rely on dynamic properties. See also in PyInit

        merged, lab, hab = WorkspaceGroup(), WorkspaceGroup(), WorkspaceGroup()
        scaled = WorkspaceGroup()

        for ws in workflow_outputs.lab_output:
            lab.addWorkspace(ws)

        for ws in workflow_outputs.hab_output:
            hab.addWorkspace(ws)

        for ws in workflow_outputs.merged_output:
            merged.addWorkspace(ws)

        for ws in workflow_outputs.scaled_hab_output:
            scaled.addWorkspace(ws)

        self._set_prop_if_group_has_data("OutputWorkspaceLAB", lab)
        self._set_prop_if_group_has_data("OutputWorkspaceHAB", hab)
        self._set_prop_if_group_has_data("OutputWorkspaceHABScaled", scaled)
        self._set_prop_if_group_has_data("OutputWorkspaceMerged", merged)
Example #5
    def set_reduced_can_workspace_on_output(self, completed_event_bundled):
        """
        Sets the reduced can workspaces on the output properties.

        The reduced can workspaces can be either LAB or HAB
        :param completed_event_bundled: a list containing output bundles
        """
        # Find the LAB Can and HAB Can entries if they exist
        lab_groups = WorkspaceGroup()
        hab_groups = WorkspaceGroup()

        for bundle in completed_event_bundled:
            if bundle.output_bundle.data_type is DataType.CAN:
                reduction_mode = bundle.output_bundle.reduction_mode
                output_workspace = bundle.output_bundle.output_workspace
                # Make sure that the output workspace is not None which can be the case if there has never been a
                # can set for the reduction.

                if output_workspace is not None and not does_can_workspace_exist_on_ads(
                        output_workspace):
                    if reduction_mode is ReductionMode.LAB:
                        lab_groups.addWorkspace(output_workspace)
                    elif reduction_mode is ReductionMode.HAB:
                        hab_groups.addWorkspace(output_workspace)
                    else:
                        raise RuntimeError(
                            "SANSSingleReduction: The reduction mode {0} should not"
                            " be set with a can.".format(reduction_mode))

        self._set_prop_if_group_has_data("OutputWorkspaceLABCan", lab_groups)
        self._set_prop_if_group_has_data("OutputWorkspaceHABCan", hab_groups)
Example #6
 def create_group_workspace_and_load(self):
     grpws = WorkspaceGroup()
     ws_detector1 = '9999; Detector 1'
     grpws.addWorkspace(CreateSampleWorkspace(OutputWorkspace=ws_detector1))
     ws_detector2 = '9999; Detector 2'
     grpws.addWorkspace(CreateSampleWorkspace(OutputWorkspace=ws_detector2))
     run = 9999
     self.loadedData.add_data(run=[run], workspace=grpws)
Example #7
 def loaded_workspace_as_group(self, run):
     if self.is_multi_period():
         workspace_group = WorkspaceGroup()
         for workspace_wrapper in self._loaded_data.get_data(run=run, instrument=self.instrument)['workspace']['OutputWorkspace']:
             workspace_group.addWorkspace(workspace_wrapper.workspace)
         return workspace_group
     else:
         return self._loaded_data.get_data(run=run, instrument=self.instrument)['workspace']['OutputWorkspace'][0].workspace
Example #8
    def test_that_can_add_workspaces_to_WorkspaceGroup_when_not_in_ADS(self):
        ws1 = WorkspaceFactory.create("Workspace2D", 2, 2, 2)
        ws2 = WorkspaceFactory.create("Workspace2D", 2, 2, 2)

        ws_group = WorkspaceGroup()

        ws_group.addWorkspace(ws1)
        ws_group.addWorkspace(ws2)

        self.assertEqual(ws_group.size(), 2)
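WorkspaceFactory.create builds workspaces purely in memory, so nothing in this test touches the ADS. Members added this way can be read back by zero-based index; a short sketch continuing from the test above:

    ws = WorkspaceFactory.create("Workspace2D", 2, 2, 2)
    ws_group = WorkspaceGroup()
    ws_group.addWorkspace(ws)
    # getItem retrieves a member by zero-based index.
    first = ws_group.getItem(0)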
Example #10
 def create_group_workspace_and_load(self):
     grpws = WorkspaceGroup()
     ws_detector1 = '9999; Detector 1'
     grpws.addWorkspace(CreateSampleWorkspace(OutputWorkspace=ws_detector1))
     ws_detector2 = '9999; Detector 2'
     grpws.addWorkspace(CreateSampleWorkspace(OutputWorkspace=ws_detector2))
     run = 9999
     self.context.data_context._loaded_data.add_data(run=[run], workspace=grpws)
     loaded_data = self.context.data_context._loaded_data
     self.context.group_context.reset_group_to_default(loaded_data)
Example #11
    def test_plot_default_case_with_detector_3_not_present(self):
        grpws = WorkspaceGroup()
        ws_detector1 = '9999; Detector 1'
        grpws.addWorkspace(CreateSampleWorkspace(OutputWorkspace=ws_detector1))
        ws_detector2 = '9999; Detector 2'
        grpws.addWorkspace(CreateSampleWorkspace(OutputWorkspace=ws_detector2))
        ws_detector3 = '9999; Detector 4'
        grpws.addWorkspace(CreateSampleWorkspace(OutputWorkspace=ws_detector3))
        ws_detector4 = '9998; Detector 1'
        grpws.addWorkspace(CreateSampleWorkspace(OutputWorkspace=ws_detector4))
        run = [9999, 9998]
        self.context.data_context._loaded_data.add_data(run=run,
                                                        workspace=grpws)
        loadedData = self.context.data_context._loaded_data
        self.context.group_context.reset_group_to_default(loadedData)
        self.presenter.add_group_to_view(self.context.group_context._groups[0],
                                         False)
        self.presenter.add_group_to_view(self.context.group_context._groups[1],
                                         False)
        self.presenter.add_group_to_view(self.context.group_context._groups[2],
                                         False)
        self.presenter.add_group_to_view(self.context.group_context._groups[3],
                                         False)
        self.assertEqual(self.view.num_rows(), 4)
        self.assertEqual(len(self.model.groups), 4)
        analyse_checkbox = self.view.get_table_item(0, 4)
        self.assertEqual(analyse_checkbox.checkState(), 0)

        self.presenter.plot_default_case()
        self.assertCountEqual(self.context.group_context.selected_groups,
                              ['9999; Detector 1', '9998; Detector 1'])
Example #12
 def test_finalise_groupworkspace(self):
     """
         The test will fail if it cannot delete a workspace because it does not exist
     """
     grpws = WorkspaceGroup()
     ws_detector1 = CreateSampleWorkspace()
     grpws.addWorkspace(ws_detector1)
     grpws_name = 'New Group Workspace'
     ws1 = CreateSampleWorkspace()
     ws2 = CreateSampleWorkspace()
     ws3 = CreateSampleWorkspace()
     ws_list = [ws1, ws2, ws3]
     load_utils_ea.finalise_groupworkspace(self.model, grpws, grpws_name,
                                           ws_list)
     self.assertTrue(AnalysisDataService.doesExist('New Group Workspace'))
Example #13
    def test_grouped_workspaces_not_in_ads(self):
        fig = plt.figure()
        plt.plot([0, 1], [0, 1])

        num_plots = 3
        ws_list = []
        ws_group = WorkspaceGroup()
        for i in range(num_plots):
            ws = CloneWorkspace(self._test_ws, StoreInADS=False)
            ws_list.append(ws)
            ws_group.addWorkspace(ws)

        plot([ws_group], wksp_indices=[1], fig=fig, overplot=True)
        ax = plt.gca()
        self.assertEqual(len(ws_group) + 1, len(ax.lines))
Example #14
 def merge_and_crop_workspaces(self, workspaces):
     """ where workspaces is a tuple of form:
             (filepath, ws name)
     """
     workspace_name = self.getPropertyValue('GroupWorkspace')
     # detectors is a dictionary of {detector_name : [names_of_workspaces]}
     detectors = {
         f"{workspace_name}; Detector {x}": []
         for x in range(1, 5)
     }
     # fill dictionary
     for workspace in workspaces:
         detector_number = workspace[0]
         detectors[f"{workspace_name}; Detector {detector_number}"].append(
             workspace)
     # initialise a group workspace
     overall_ws = WorkspaceGroup()
     # merge each workspace list in detectors into a single workspace
     for detector, workspace_list in detectors.items():
         if workspace_list:
             # sort workspace list according to type_index
             sorted_workspace_list = [None] * NUM_FILES_PER_DETECTOR
             for workspace in workspace_list:
                 data_type = workspace.rsplit("_")[1]
                 sorted_workspace_list[SPECTRUM_INDEX[data_type] -
                                       1] = workspace
             workspace_list = sorted_workspace_list
             # create merged workspace
             merged_ws = self.create_merged_workspace(workspace_list)
             ConvertToHistogram(InputWorkspace=merged_ws,
                                OutputWorkspace=detector)
             minX, maxX = [], []
             ws = AnalysisDataService.retrieve(detector)
             for i in range(ws.getNumberHistograms()):
                 xdata = ws.readX(i)
                 minX.append(xdata[0])
                 if i == 2:
                     maxX.append(xdata[-1])
                 else:
                     maxX.append(xdata[-1] - 1)
             CropWorkspaceRagged(InputWorkspace=detector,
                                 OutputWorkspace=detector,
                                 xmin=minX,
                                 xmax=maxX)
             overall_ws.addWorkspace(AnalysisDataService.retrieve(detector))
     self.setProperty("GroupWorkspace", overall_ws)
Example #15
    def set_transmission_workspaces_on_output(self, completed_event_slices,
                                              fit_state):
        calc_can, calc_sample = WorkspaceGroup(), WorkspaceGroup()
        unfit_can, unfit_sample = WorkspaceGroup(), WorkspaceGroup()

        output_hab_or_lab = None
        for bundle in completed_event_slices:
            if output_hab_or_lab is not None and output_hab_or_lab != bundle.output_bundle.reduction_mode:
                continue  # The transmission workspace for HAB/LAB is the same, so only output one
            output_hab_or_lab = bundle.output_bundle.reduction_mode
            calculated_transmission_workspace = bundle.transmission_bundle.calculated_transmission_workspace
            unfitted_transmission_workspace = bundle.transmission_bundle.unfitted_transmission_workspace
            if bundle.transmission_bundle.data_type is DataType.CAN:
                if does_can_workspace_exist_on_ads(
                        calculated_transmission_workspace):
                    # The workspace is cloned here because the transmission runs are diagnostic output so even though
                    # the values already exist they need to be labelled separately for each reduction.
                    calculated_transmission_workspace = CloneWorkspace(
                        calculated_transmission_workspace, StoreInADS=False)
                if does_can_workspace_exist_on_ads(
                        unfitted_transmission_workspace):
                    unfitted_transmission_workspace = CloneWorkspace(
                        unfitted_transmission_workspace, StoreInADS=False)
                if calculated_transmission_workspace:
                    calc_can.addWorkspace(calculated_transmission_workspace)
                if unfitted_transmission_workspace:
                    unfit_can.addWorkspace(unfitted_transmission_workspace)

            elif bundle.transmission_bundle.data_type is DataType.SAMPLE:
                if calculated_transmission_workspace:
                    calc_sample.addWorkspace(calculated_transmission_workspace)
                if unfitted_transmission_workspace:
                    unfit_sample.addWorkspace(unfitted_transmission_workspace)
            else:
                raise RuntimeError(
                    "SANSSingleReduction: The data type {0} should be"
                    " sample or can.".format(
                        bundle.transmission_bundle.data_type))

        self._set_prop_if_group_has_data(
            "OutputWorkspaceCalculatedTransmission", calc_sample)
        self._set_prop_if_group_has_data("OutputWorkspaceUnfittedTransmission",
                                         unfit_sample)
        self._set_prop_if_group_has_data(
            "OutputWorkspaceCalculatedTransmissionCan", calc_can)
        self._set_prop_if_group_has_data(
            "OutputWorkspaceUnfittedTransmissionCan", unfit_can)
Example #16
def create_test_workspacegroup(group_name=None, size=None, items=None):
    if size is not None and items is not None:
        raise ValueError("Provide either size or items not both.")

    group_name = group_name if group_name is not None else 'fitting_context_testgroup'
    group = WorkspaceGroup()
    if size is not None:
        for i in range(size):
            ws_name = '{}_{}'.format(group_name, i)
            fake_ws = create_test_workspace(ws_name)
            group.addWorkspace(fake_ws)
    elif items is not None:
        for item in items:
            group.addWorkspace(item)

    ads = AnalysisDataService.Instance()
    ads.addOrReplace(group_name, group)
    return group
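A usage sketch for the helper above, showing both construction modes and the guard against supplying size and items together (it assumes the create_test_workspace factory it relies on is importable alongside it):

    # Three members with generated names, registered on the ADS.
    group = create_test_workspacegroup(group_name='my_group', size=3)
    assert group.size() == 3

    # Supplying both arguments is rejected before any workspace is built.
    try:
        create_test_workspacegroup(size=2, items=[])
    except ValueError:
        pass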
Example #17
    def set_reduced_can_workspace_on_output(self, output_bundles):
        """
        Sets the reduced can group workspaces on the output properties.
        The reduced can workspaces can be:
        LAB Can or
        HAB Can

        :param output_bundles: a list of output bundles
        """
        workspace_group_lab_can = WorkspaceGroup()
        workspace_group_hab_can = WorkspaceGroup()
        # Find the LAB Can and HAB Can entries if they exist
        for component_bundle in output_bundles:
            for output_bundle in component_bundle:
                if output_bundle.data_type is DataType.Can:
                    reduction_mode = output_bundle.reduction_mode
                    output_workspace = output_bundle.output_workspace
                    # Make sure that the output workspace is not None which can be the case if there has never been a
                    # can set for the reduction.
                    if output_workspace is not None and not does_can_workspace_exist_on_ads(
                            output_workspace):
                        name = self._get_output_workspace_name(
                            output_bundle.state,
                            output_bundle.reduction_mode,
                            can=True)
                        AnalysisDataService.addOrReplace(
                            name, output_workspace)
                        if reduction_mode is ISISReductionMode.LAB:
                            workspace_group_lab_can.addWorkspace(
                                output_workspace)
                        elif reduction_mode is ISISReductionMode.HAB:
                            workspace_group_hab_can.addWorkspace(
                                output_workspace)
                        else:
                            raise RuntimeError(
                                "SANSSingleReduction: The reduction mode {0} should not"
                                " be set with a can.".format(reduction_mode))
        if workspace_group_lab_can.size() > 0:
            # LAB group workspace is non-empty, so we want to set it as output
            self.setProperty("OutputWorkspaceLABCan", workspace_group_lab_can)
        if workspace_group_hab_can.size() > 0:
            self.setProperty("OutputWorkspaceHABCan", workspace_group_hab_can)
Example #18
    def set_reduced_can_count_and_norm_on_output(self, completed_event_slices):
        """
        Sets the reduced can count and norm group workspaces on the output properties.
        This includes the HAB/LAB counts and Norms

        :param completed_event_slices: a list containing a single list of output bundle parts
        """
        # Find the partial output bundles for LAB Can and HAB Can if they exist
        lab_can_counts, hab_can_counts = WorkspaceGroup(), WorkspaceGroup()
        lab_can_norms, hab_can_norms = WorkspaceGroup(), WorkspaceGroup()

        for bundle in completed_event_slices:
            if bundle.output_bundle.data_type is DataType.CAN:
                reduction_mode = bundle.parts_bundle.reduction_mode
                output_workspace_count = bundle.parts_bundle.output_workspace_count
                output_workspace_norm = bundle.parts_bundle.output_workspace_norm
                # Make sure that the output workspace is not None which can be the case if there has never been a
                # can set for the reduction.
                if output_workspace_norm is not None and output_workspace_count is not None and \
                        not does_can_workspace_exist_on_ads(output_workspace_norm) and \
                        not does_can_workspace_exist_on_ads(output_workspace_count):
                    if reduction_mode is ReductionMode.LAB:
                        lab_can_counts.addWorkspace(output_workspace_count)
                        lab_can_norms.addWorkspace(output_workspace_norm)
                    elif reduction_mode is ReductionMode.HAB:
                        hab_can_counts.addWorkspace(output_workspace_count)
                        hab_can_norms.addWorkspace(output_workspace_norm)
                    else:
                        raise RuntimeError(
                            "SANSSingleReduction: The reduction mode {0} should not"
                            " be set with a partial can.".format(
                                reduction_mode))

        self._set_prop_if_group_has_data("OutputWorkspaceLABCanCount",
                                         lab_can_counts)
        self._set_prop_if_group_has_data("OutputWorkspaceLABCanNorm",
                                         lab_can_norms)
        self._set_prop_if_group_has_data("OutputWorkspaceHABCanCount",
                                         hab_can_counts)
        self._set_prop_if_group_has_data("OutputWorkspaceHABCanNorm",
                                         hab_can_norms)
Example #19
 def set_can_and_sam_on_output(self, completed_event_slices):
     """
     Sets the reduced can and sample workspaces.
      These are the LAB/HAB can and sample workspaces.
      Cans are also output for optimization, so check for double output.
      :param completed_event_slices: a list containing a single list of output_bundles
     """
     lab_cans, hab_cans = WorkspaceGroup(), WorkspaceGroup()
     lab_samples, hab_samples = WorkspaceGroup(), WorkspaceGroup()
     for bundle in completed_event_slices:
         reduction_mode = bundle.output_bundle.reduction_mode
         output_workspace = bundle.output_bundle.output_workspace
         if bundle.output_bundle.data_type is DataType.CAN:
             if output_workspace is not None and not does_can_workspace_exist_on_ads(
                     output_workspace):
                 if reduction_mode is ReductionMode.LAB:
                     lab_cans.addWorkspace(output_workspace)
                 elif reduction_mode is ReductionMode.HAB:
                     hab_cans.addWorkspace(output_workspace)
                 else:
                     raise RuntimeError(
                         "SANSSingleReduction: The reduction mode {0} should not"
                         " be set with a can.".format(reduction_mode))
         elif bundle.output_bundle.data_type is DataType.SAMPLE:
             if output_workspace is not None and not does_can_workspace_exist_on_ads(
                     output_workspace):
                 if reduction_mode is ReductionMode.LAB:
                     lab_samples.addWorkspace(output_workspace)
                 elif reduction_mode is ReductionMode.HAB:
                     hab_samples.addWorkspace(output_workspace)
                 else:
                     raise RuntimeError(
                         "SANSSingleReduction: The reduction mode {0} should not"
                         " be set with a sample.".format(reduction_mode))
     self._set_prop_if_group_has_data("OutputWorkspaceLABCan", lab_cans)
     self._set_prop_if_group_has_data("OutputWorkspaceHABCan", hab_cans)
     self._set_prop_if_group_has_data("OutputWorkspaceLABSample",
                                      lab_samples)
     self._set_prop_if_group_has_data("OutputWorkspaceHABSample",
                                      hab_samples)
Example #20
    def create_run_workspaces(self, run):
        detectors = ['Detector 1', 'Detector 2', 'Detector 3', 'Detector 4']
        grpws = WorkspaceGroup()
        ws_detector1 = CreateSampleWorkspace()
        grpws.addWorkspace(ws_detector1)
        ws_detector2 = CreateSampleWorkspace()
        grpws.addWorkspace(ws_detector2)
        ws_detector3 = CreateSampleWorkspace()
        grpws.addWorkspace(ws_detector3)
        ws_detector4 = CreateSampleWorkspace()
        grpws.addWorkspace(ws_detector4)
        run_results = RunObject(run, detectors, grpws)

        self.model._loaded_data_store.add_data(run=[run], workspace=grpws)
        self.model._data_context.run_info_update(run_results)
Example #21
    def test_find_ws_to_use(self):
        run = 5555
        detector = 'Detector 3'
        run_detectors = [
            'Detector 1', 'Detector 2', 'Detector 3', 'Detector 4'
        ]

        grpws = WorkspaceGroup()
        ws_detector1 = CreateSampleWorkspace()
        grpws.addWorkspace(ws_detector1)
        ws_detector2 = CreateSampleWorkspace()
        grpws.addWorkspace(ws_detector2)
        ws_detector3 = CreateSampleWorkspace()
        grpws.addWorkspace(ws_detector3)
        ws_detector4 = CreateSampleWorkspace()
        grpws.addWorkspace(ws_detector4)

        self.model._loaded_data_store.add_data(run=[run], workspace=grpws)

        ws = load_utils_ea.find_ws_to_use(self.model, run_detectors, detector,
                                          run)
        self.assertEqual(ws.name(), 'ws_detector3')
Example #22
    def set_output_workspaces(self, reduction_mode_vs_output_workspaces,
                              reduction_mode_vs_workspace_names):
        """
        Sets the output workspaces which can be HAB, LAB or Merged.

        At this step we also provide a workspace name to the sample logs which can be used later on for saving
        :param reduction_mode_vs_output_workspaces:  map from reduction mode to output workspace
        :param reduction_mode_vs_workspace_names: map from reduction mode to output workspace name
        """
        workspace_group_merged = WorkspaceGroup()
        workspace_group_lab = WorkspaceGroup()
        workspace_group_hab = WorkspaceGroup()
        # Note that this breaks the flexibility that we have established with the reduction mode. We have not hardcoded
        # HAB or LAB anywhere which means that in the future there could be other detectors of relevance. Here we
        # reference HAB and LAB directly since we currently don't want to rely on dynamic properties. See also in PyInit
        for reduction_mode, output_workspaces in list(reduction_mode_vs_output_workspaces.items()):
            workspace_names = reduction_mode_vs_workspace_names[reduction_mode]
            for output_workspace, output_name in zip(output_workspaces,
                                                     workspace_names):
                # In an MPI reduction output_workspace is produced on the master rank, skip others.
                if output_workspace is None:
                    continue
                else:
                    AnalysisDataService.addOrReplace(output_name,
                                                     output_workspace)
                if reduction_mode is ReductionMode.Merged:
                    workspace_group_merged.addWorkspace(output_workspace)
                elif reduction_mode is ISISReductionMode.LAB:
                    workspace_group_lab.addWorkspace(output_workspace)
                elif reduction_mode is ISISReductionMode.HAB:
                    workspace_group_hab.addWorkspace(output_workspace)
                else:
                    raise RuntimeError(
                        "SANSSingleReduction: Cannot set the output workspace. "
                        "The selected reduction mode {0} is unknown.".format(
                            reduction_mode))
        if workspace_group_merged.size() > 0:
            self.setProperty("OutputWorkspaceMerged", workspace_group_merged)
        if workspace_group_lab.size() > 0:
            self.setProperty("OutputWorkspaceLAB", workspace_group_lab)
        if workspace_group_hab.size() > 0:
            self.setProperty("OutputWorkspaceHAB", workspace_group_hab)
Example #23
    def PyExec(self):
        input_workspaces = self._expand_groups()
        outWS = self.getPropertyValue("OutputWorkspace")
        CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                             InstrumentWorkspace=input_workspaces[0],
                             NumberOfPeaks=0,
                             OutputWorkspace=outWS,
                             EnableLogging=False)

        scale = self.getProperty("ScaleFactor").value
        chisqmax = self.getProperty("ChiSqMax").value
        signalNoiseMin = self.getProperty("SignalNoiseMin").value
        ll = self.getProperty("LowerLeft").value
        ur = self.getProperty("UpperRight").value
        startX = self.getProperty('StartX').value
        endX = self.getProperty('EndX').value
        use_lorentz = self.getProperty("ApplyLorentz").value
        optimize_q = self.getProperty("OptimizeQVector").value
        output_fit = self.getProperty("OutputFitResults").value

        if output_fit:
            fit_results = WorkspaceGroup()
            AnalysisDataService.addOrReplace(outWS + "_fit_results",
                                             fit_results)

        for inWS in input_workspaces:
            tmp_inWS = '__tmp_' + inWS
            IntegrateMDHistoWorkspace(InputWorkspace=inWS,
                                      P1Bin=f'{ll[1]},{ur[1]}',
                                      P2Bin=f'{ll[0]},{ur[0]}',
                                      OutputWorkspace=tmp_inWS,
                                      EnableLogging=False)
            ConvertMDHistoToMatrixWorkspace(tmp_inWS,
                                            OutputWorkspace=tmp_inWS,
                                            EnableLogging=False)
            data = ConvertToPointData(tmp_inWS,
                                      OutputWorkspace=tmp_inWS,
                                      EnableLogging=False)

            run = mtd[inWS].getExperimentInfo(0).run()
            scan_log = 'omega' if np.isclose(run.getTimeAveragedStd('phi'),
                                             0.0) else 'phi'
            scan_axis = run[scan_log].value
            data.setX(0, scan_axis)

            y = data.extractY().flatten()
            x = data.extractX().flatten()
            function = f"name=FlatBackground, A0={np.nanmin(y)};" \
                f"name=Gaussian, PeakCentre={x[np.nanargmax(y)]}, Height={np.nanmax(y)-np.nanmin(y)}, Sigma=0.25"
            constraints = f"f0.A0 > 0, f1.Height > 0, {x.min()} < f1.PeakCentre < {x.max()}"
            try:
                fit_result = Fit(function,
                                 data,
                                 Output=str(data),
                                 IgnoreInvalidData=True,
                                 OutputParametersOnly=not output_fit,
                                 Constraints=constraints,
                                 StartX=startX,
                                 EndX=endX,
                                 EnableLogging=False)
            except RuntimeError as e:
                self.log().warning("Failed to fit workspace {}: {}".format(
                    inWS, e))
                continue

            if fit_result.OutputStatus == 'success' and fit_result.OutputChi2overDoF < chisqmax:
                __tmp_pw = CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                                                InstrumentWorkspace=inWS,
                                                NumberOfPeaks=0,
                                                EnableLogging=False)

                _, A, x, s, _ = fit_result.OutputParameters.toDict()['Value']
                _, errA, _, errs, _ = fit_result.OutputParameters.toDict()['Error']

                if scan_log == 'omega':
                    SetGoniometer(Workspace=__tmp_pw,
                                  Axis0=f'{x},0,1,0,-1',
                                  Axis1='chi,0,0,1,-1',
                                  Axis2='phi,0,1,0,-1',
                                  EnableLogging=False)
                else:
                    SetGoniometer(Workspace=__tmp_pw,
                                  Axis0='omega,0,1,0,-1',
                                  Axis1='chi,0,0,1,-1',
                                  Axis2=f'{x},0,1,0,-1',
                                  EnableLogging=False)

                peak = __tmp_pw.createPeakHKL([
                    run['h'].getStatistics().median,
                    run['k'].getStatistics().median,
                    run['l'].getStatistics().median
                ])
                peak.setWavelength(float(run['wavelength'].value))

                integrated_intensity = A * s * np.sqrt(2 * np.pi) * scale
                peak.setIntensity(integrated_intensity)

                # Convert correlation back into covariance
                cor_As = (
                    fit_result.OutputNormalisedCovarianceMatrix.cell(1, 4) /
                    100 * fit_result.OutputParameters.cell(1, 2) *
                    fit_result.OutputParameters.cell(3, 2))
                # σ^2 = 2π (A^2 σ_s^2 + σ_A^2 s^2 + 2 A s σ_As)
                integrated_intensity_error = np.sqrt(
                    2 * np.pi * (A**2 * errs**2 + s**2 * errA**2 +
                                 2 * A * s * cor_As)) * scale
                peak.setSigmaIntensity(integrated_intensity_error)

                if integrated_intensity / integrated_intensity_error > signalNoiseMin:
                    __tmp_pw.addPeak(peak)

                    # correct q-vector using CentroidPeaksMD
                    if optimize_q:
                        __tmp_q_ws = HB3AAdjustSampleNorm(InputWorkspaces=inWS,
                                                          NormaliseBy='None',
                                                          EnableLogging=False)
                        __tmp_pw = CentroidPeaksMD(__tmp_q_ws,
                                                   __tmp_pw,
                                                   EnableLogging=False)
                        DeleteWorkspace(__tmp_q_ws, EnableLogging=False)

                    if use_lorentz:
                        # ILL Neutron Data Booklet, Second Edition, Section 2.9, Part 4.1, Equation 7
                        peak = __tmp_pw.getPeak(0)
                        lorentz = abs(
                            np.sin(peak.getScattering() *
                                   np.cos(peak.getAzimuthal())))
                        peak.setIntensity(peak.getIntensity() * lorentz)
                        peak.setSigmaIntensity(peak.getSigmaIntensity() *
                                               lorentz)

                    CombinePeaksWorkspaces(outWS,
                                           __tmp_pw,
                                           OutputWorkspace=outWS,
                                           EnableLogging=False)
                    DeleteWorkspace(__tmp_pw, EnableLogging=False)

                    if output_fit:
                        fit_results.addWorkspace(
                            RenameWorkspace(tmp_inWS + '_Workspace',
                                            outWS + "_" + inWS + '_Workspace',
                                            EnableLogging=False))
                        fit_results.addWorkspace(
                            RenameWorkspace(tmp_inWS + '_Parameters',
                                            outWS + "_" + inWS + '_Parameters',
                                            EnableLogging=False))
                        fit_results.addWorkspace(
                            RenameWorkspace(tmp_inWS +
                                            '_NormalisedCovarianceMatrix',
                                            outWS + "_" + inWS +
                                            '_NormalisedCovarianceMatrix',
                                            EnableLogging=False))
                        fit_results.addWorkspace(
                            IntegrateMDHistoWorkspace(
                                InputWorkspace=inWS,
                                P1Bin=f'{ll[1]},0,{ur[1]}',
                                P2Bin=f'{ll[0]},0,{ur[0]}',
                                P3Bin='0,{}'.format(
                                    mtd[inWS].getDimension(2).getNBins()),
                                OutputWorkspace=outWS + "_" + inWS + "_ROI",
                                EnableLogging=False))
                else:
                    self.log().warning(
                        "Skipping peak from {} because Signal/Noise={:.3f} which is less than {}"
                        .format(
                            inWS,
                            integrated_intensity / integrated_intensity_error,
                            signalNoiseMin))
            else:
                self.log().warning(
                    "Failed to fit workspace {}: Output Status={}, ChiSq={}".
                    format(inWS, fit_result.OutputStatus,
                           fit_result.OutputChi2overDoF))

            for tmp_ws in (tmp_inWS, tmp_inWS + '_Workspace',
                           tmp_inWS + '_Parameters',
                           tmp_inWS + '_NormalisedCovarianceMatrix'):
                if mtd.doesExist(tmp_ws):
                    DeleteWorkspace(tmp_ws, EnableLogging=False)

        self.setProperty("OutputWorkspace", mtd[outWS])
Example #24
 def _create_non_ads_sample_workspaces():
     ws_group = WorkspaceGroup()
     for i in range(2):
         ws_group.addWorkspace(CreateSampleWorkspace(OutputWorkspace=str(uuid.uuid4()),
                               StoreInADS=False))
     return ws_group
Example #25
 def _group_workspaces(self, workspaces):
     group = WorkspaceGroup()
     for ws in workspaces.values():
         if ws:
             group.addWorkspace(ws)
     return group
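Because _group_workspaces skips falsy entries, a results dict with missing detectors still yields a valid group. A usage sketch (the dict keys and the self context are illustrative):

    workspaces = {'detector_1': CreateSampleWorkspace(OutputWorkspace='det1'),
                  'detector_2': None}  # e.g. a detector that produced no data
    group = self._group_workspaces(workspaces)
    # Only det1 is added; the None entry is filtered out.
    assert group.size() == 1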
Example #26
    def PyExec(self):
        input_workspaces = self._expand_groups()
        outWS = self.getPropertyValue("OutputWorkspace")
        CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                             InstrumentWorkspace=input_workspaces[0],
                             NumberOfPeaks=0,
                             OutputWorkspace=outWS,
                             EnableLogging=False)

        method = self.getProperty("Method").value
        n_bkgr_pts = self.getProperty("NumBackgroundPts").value
        n_fwhm = self.getProperty("WidthScale").value
        scale = self.getProperty("ScaleFactor").value
        chisqmax = self.getProperty("ChiSqMax").value
        signalNoiseMin = self.getProperty("SignalNoiseMin").value
        ll = self.getProperty("LowerLeft").value
        ur = self.getProperty("UpperRight").value
        startX = self.getProperty('StartX').value
        endX = self.getProperty('EndX').value
        use_lorentz = self.getProperty("ApplyLorentz").value
        optimize_q = self.getProperty("OptimizeQVector").value
        output_fit = self.getProperty("OutputFitResults").value

        if output_fit and method != "Counts":
            fit_results = WorkspaceGroup()
            AnalysisDataService.addOrReplace(outWS + "_fit_results",
                                             fit_results)

        for inWS in input_workspaces:
            tmp_inWS = '__tmp_' + inWS
            IntegrateMDHistoWorkspace(InputWorkspace=inWS,
                                      P1Bin=f'{ll[1]},{ur[1]}',
                                      P2Bin=f'{ll[0]},{ur[0]}',
                                      OutputWorkspace=tmp_inWS,
                                      EnableLogging=False)
            ConvertMDHistoToMatrixWorkspace(tmp_inWS,
                                            OutputWorkspace=tmp_inWS,
                                            EnableLogging=False)
            data = ConvertToPointData(tmp_inWS,
                                      OutputWorkspace=tmp_inWS,
                                      EnableLogging=False)

            run = mtd[inWS].getExperimentInfo(0).run()
            scan_log = 'omega' if np.isclose(run.getTimeAveragedStd('phi'),
                                             0.0) else 'phi'
            scan_axis = run[scan_log].value
            scan_step = (scan_axis[-1] - scan_axis[0]) / (scan_axis.size - 1)
            data.setX(0, scan_axis)

            y = data.extractY().flatten()
            x = data.extractX().flatten()

            __tmp_pw = CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                                            InstrumentWorkspace=inWS,
                                            NumberOfPeaks=0,
                                            EnableLogging=False)

            if method != "Counts":
                # fit against gaussian with flat background for both the Fitted and CountsWithFitting methods
                fit_result = self._fit_gaussian(inWS, data, x, y, startX, endX,
                                                output_fit)

                if fit_result and fit_result.OutputStatus == 'success' and fit_result.OutputChi2overDoF < chisqmax:
                    B, A, peak_centre, sigma, _ = fit_result.OutputParameters.toDict()['Value']
                    _, errA, _, errs, _ = fit_result.OutputParameters.toDict()['Error']

                    if method == "Fitted":
                        integrated_intensity = A * sigma * np.sqrt(2 * np.pi)

                        # Convert correlation back into covariance
                        cor_As = (
                            fit_result.OutputNormalisedCovarianceMatrix.cell(
                                1, 4) / 100 *
                            fit_result.OutputParameters.cell(1, 2) *
                            fit_result.OutputParameters.cell(3, 2))
                        # σ^2 = 2π (A^2 σ_s^2 + σ_A^2 s^2 + 2 A s σ_As)
                        integrated_intensity_error = np.sqrt(
                            2 * np.pi * (A**2 * errs**2 + sigma**2 * errA**2 +
                                         2 * A * sigma * cor_As))

                    elif method == "CountsWithFitting":
                        y = y[slice(
                            np.searchsorted(
                                x, peak_centre - 2.3548 * sigma * n_fwhm / 2),
                            np.searchsorted(
                                x, peak_centre + 2.3548 * sigma * n_fwhm / 2))]
                        # subtract out the fitted flat background
                        integrated_intensity = (y.sum() -
                                                B * y.size) * scan_step
                        integrated_intensity_error = np.sum(
                            np.sqrt(y)) * scan_step

                    # update the goniometer position based on the fitted peak center
                    if scan_log == 'omega':
                        SetGoniometer(Workspace=__tmp_pw,
                                      Axis0=f'{peak_centre},0,1,0,-1',
                                      Axis1='chi,0,0,1,-1',
                                      Axis2='phi,0,1,0,-1',
                                      EnableLogging=False)
                    else:
                        SetGoniometer(Workspace=__tmp_pw,
                                      Axis0='omega,0,1,0,-1',
                                      Axis1='chi,0,0,1,-1',
                                      Axis2=f'{peak_centre},0,1,0,-1',
                                      EnableLogging=False)
                else:
                    self.log().warning(
                        "Failed to fit workspace {}: Output Status={}, ChiSq={}"
                        .format(inWS,
                                fit_result.OutputStatus if fit_result else "n/a",
                                fit_result.OutputChi2overDoF if fit_result else "n/a"))
                    self._delete_tmp_workspaces(str(__tmp_pw), tmp_inWS)
                    continue
            else:
                integrated_intensity, integrated_intensity_error = self._counts_integration(
                    data, n_bkgr_pts, scan_step)

                # set the goniometer position to use the average of the scan
                SetGoniometer(Workspace=__tmp_pw,
                              Axis0='omega,0,1,0,-1',
                              Axis1='chi,0,0,1,-1',
                              Axis2='phi,0,1,0,-1',
                              EnableLogging=False)

            integrated_intensity *= scale
            integrated_intensity_error *= scale

            peak = __tmp_pw.createPeakHKL([
                run['h'].getStatistics().median,
                run['k'].getStatistics().median,
                run['l'].getStatistics().median
            ])
            peak.setWavelength(float(run['wavelength'].value))
            peak.setIntensity(integrated_intensity)
            peak.setSigmaIntensity(integrated_intensity_error)

            if integrated_intensity / integrated_intensity_error > signalNoiseMin:
                __tmp_pw.addPeak(peak)

                # correct q-vector using CentroidPeaksMD
                if optimize_q:
                    __tmp_q_ws = HB3AAdjustSampleNorm(InputWorkspaces=inWS,
                                                      NormaliseBy='None',
                                                      EnableLogging=False)
                    __tmp_pw = CentroidPeaksMD(__tmp_q_ws,
                                               __tmp_pw,
                                               EnableLogging=False)
                    DeleteWorkspace(__tmp_q_ws, EnableLogging=False)

                if use_lorentz:
                    # ILL Neutron Data Booklet, Second Edition, Section 2.9, Part 4.1, Equation 7
                    peak = __tmp_pw.getPeak(0)
                    lorentz = abs(
                        np.sin(peak.getScattering() *
                               np.cos(peak.getAzimuthal())))
                    peak.setIntensity(peak.getIntensity() * lorentz)
                    peak.setSigmaIntensity(peak.getSigmaIntensity() * lorentz)

                CombinePeaksWorkspaces(outWS,
                                       __tmp_pw,
                                       OutputWorkspace=outWS,
                                       EnableLogging=False)

                if output_fit and method != "Counts":
                    fit_results.addWorkspace(
                        RenameWorkspace(tmp_inWS + '_Workspace',
                                        outWS + "_" + inWS + '_Workspace',
                                        EnableLogging=False))
                    fit_results.addWorkspace(
                        RenameWorkspace(tmp_inWS + '_Parameters',
                                        outWS + "_" + inWS + '_Parameters',
                                        EnableLogging=False))
                    fit_results.addWorkspace(
                        RenameWorkspace(
                            tmp_inWS + '_NormalisedCovarianceMatrix',
                            outWS + "_" + inWS + '_NormalisedCovarianceMatrix',
                            EnableLogging=False))
                    fit_results.addWorkspace(
                        IntegrateMDHistoWorkspace(
                            InputWorkspace=inWS,
                            P1Bin=f'{ll[1]},0,{ur[1]}',
                            P2Bin=f'{ll[0]},0,{ur[0]}',
                            P3Bin='0,{}'.format(
                                mtd[inWS].getDimension(2).getNBins()),
                            OutputWorkspace=outWS + "_" + inWS + "_ROI",
                            EnableLogging=False))
            else:
                self.log().warning(
                    "Skipping peak from {} because Signal/Noise={:.3f} which is less than {}"
                    .format(inWS,
                            integrated_intensity / integrated_intensity_error,
                            signalNoiseMin))

            self._delete_tmp_workspaces(str(__tmp_pw), tmp_inWS)

        self.setProperty("OutputWorkspace", mtd[outWS])
Example #27
    def test_event(self):
        # check that the workflow runs with event workspaces as input, junk data

        event_data = CreateSampleWorkspace(
            NumBanks=1,
            BinWidth=20000,
            PixelSpacing=0.1,
            BankPixelWidth=100,
            WorkspaceType="Event",
        )
        event_cal = CreateSampleWorkspace(
            NumBanks=1,
            BinWidth=20000,
            PixelSpacing=0.1,
            BankPixelWidth=100,
            WorkspaceType="Event",
            Function="Flat background",
        )
        event_bkg = CreateSampleWorkspace(
            NumBanks=1,
            BinWidth=20000,
            PixelSpacing=0.1,
            BankPixelWidth=100,
            WorkspaceType="Event",
            Function="Flat background",
        )

        # CASE 1
        # input single workspace, output single workspace
        pd_out = WANDPowderReduction(
            InputWorkspace=event_data,
            CalibrationWorkspace=event_cal,
            BackgroundWorkspace=event_bkg,
            Target="Theta",
            NumberBins=1000,
            NormaliseBy="None",
            Sum=False,
        )

        assert isinstance(pd_out, MatrixWorkspace)

        x = pd_out.extractX()
        y = pd_out.extractY()

        self.assertAlmostEqual(x.min(), 0.03517355)
        self.assertAlmostEqual(x.max(), 70.3119282)
        self.assertAlmostEqual(y[0, 0], 0.0)

        # CASE 2
        # input multiple single ws, output (single) summed ws
        pd_out = WANDPowderReduction(
            InputWorkspace=[event_data, event_data],
            CalibrationWorkspace=event_cal,
            BackgroundWorkspace=event_bkg,
            Target="Theta",
            NumberBins=1000,
            NormaliseBy="None",
            Sum=True,
        )

        x = pd_out.extractX()
        y = pd_out.extractY()

        self.assertAlmostEqual(x.min(), 0.03517355)
        self.assertAlmostEqual(x.max(), 70.3119282)
        self.assertAlmostEqual(y[0, 0], 0.0)
        assert isinstance(pd_out, MatrixWorkspace)

        # CASE 3
        # input list of several ws, output group ws containing several ws
        pd_out = WANDPowderReduction(
            InputWorkspace=[event_data, event_data],
            CalibrationWorkspace=event_cal,
            BackgroundWorkspace=event_bkg,
            Target="Theta",
            NumberBins=1000,
            NormaliseBy="None",
            Sum=False,
        )

        for i in pd_out:

            x = i.extractX()
            y = i.extractY()

            self.assertAlmostEqual(x.min(), 0.03517355)
            self.assertAlmostEqual(x.max(), 70.3119282)
            self.assertAlmostEqual(y[0, 0], 0.0)

        assert isinstance(pd_out, WorkspaceGroup)
        assert len(pd_out) == 2

        event_data2 = CloneWorkspace(event_data)

        event_data_group = WorkspaceGroup()
        event_data_group.addWorkspace(event_data)
        event_data_group.addWorkspace(event_data2)

        # CASE 4 - input group ws, output group ws
        pd_out = WANDPowderReduction(
            InputWorkspace=event_data_group,
            CalibrationWorkspace=event_cal,
            BackgroundWorkspace=event_bkg,
            Target="Theta",
            NumberBins=1000,
            NormaliseBy="None",
            Sum=False,
        )

        for i in pd_out:
            x = i.extractX()
            y = i.extractY()

            self.assertAlmostEqual(x.min(), 0.03517355)
            self.assertAlmostEqual(x.max(), 70.3119282)
            self.assertAlmostEqual(y[0, 0], 0.0)

        assert isinstance(pd_out, WorkspaceGroup)
        assert len(pd_out) == 2

        event_data2 = CloneWorkspace(event_data)
        event_data_group = GroupWorkspaces([event_data, event_data2])

        pd_out = WANDPowderReduction(
            InputWorkspace=event_data_group,
            CalibrationWorkspace=event_cal,
            BackgroundWorkspace=event_bkg,
            Target="Theta",
            NumberBins=1000,
            NormaliseBy="None",
            Sum=False,
        )

        for i in pd_out:
            x = i.extractX()
            y = i.extractY()

            self.assertAlmostEqual(x.min(), 0.03517355)
            self.assertAlmostEqual(x.max(), 70.3119282)
            self.assertAlmostEqual(y[0, 0], 0.0)

        assert isinstance(pd_out, WorkspaceGroup)
        assert len(pd_out) == 2
Example #28
    def set_can_and_sam_on_output(self, output_bundles):
        """
        Sets the reduced can and sample workspaces.
        These can be:
        1. LAB Can
        2. HAB Can
        3. LAB Sample
        4. HAB Sample
        Cans are also output for optimization, so check for double output.
        :param output_bundles: a list of output_bundles
        """
        workspace_group_lab_can = WorkspaceGroup()
        workspace_group_hab_can = WorkspaceGroup()
        workspace_group_lab_sample = WorkspaceGroup()
        workspace_group_hab_sample = WorkspaceGroup()

        for component_bundle in output_bundles:
            for output_bundle in component_bundle:
                if output_bundle.data_type is DataType.Can:
                    reduction_mode = output_bundle.reduction_mode
                    output_workspace = output_bundle.output_workspace

                    if output_workspace is not None and not does_can_workspace_exist_on_ads(
                            output_workspace):
                        can_name = self._get_output_workspace_name(
                            output_bundle.state,
                            output_bundle.reduction_mode,
                            can=True)
                        AnalysisDataService.addOrReplace(
                            can_name, output_workspace)
                        if reduction_mode is ISISReductionMode.LAB:
                            workspace_group_lab_can.addWorkspace(
                                output_workspace)
                        elif reduction_mode is ISISReductionMode.HAB:
                            workspace_group_hab_can.addWorkspace(
                                output_workspace)
                        else:
                            raise RuntimeError(
                                "SANSSingleReduction: The reduction mode {0} should not"
                                " be set with a can.".format(reduction_mode))
                elif output_bundle.data_type is DataType.Sample:
                    reduction_mode = output_bundle.reduction_mode
                    output_workspace = output_bundle.output_workspace

                    if output_workspace is not None:
                        sample_name = self._get_output_workspace_name(
                            output_bundle.state,
                            output_bundle.reduction_mode,
                            sample=True)
                        AnalysisDataService.addOrReplace(
                            sample_name, output_workspace)
                        if reduction_mode is ISISReductionMode.LAB:
                            workspace_group_lab_sample.addWorkspace(
                                output_workspace)
                        elif reduction_mode is ISISReductionMode.HAB:
                            workspace_group_hab_sample.addWorkspace(
                                output_workspace)
                        else:
                            raise RuntimeError(
                                "SANSSingleReduction: The reduction mode {0} should not"
                                " be set with a sample.".format(
                                    reduction_mode))

        if workspace_group_hab_can.size() > 0:
            self.setProperty("OutputWorkspaceHABCan", workspace_group_hab_can)
        if workspace_group_hab_sample.size() > 0:
            self.setProperty("OutputWorkspaceHABSample",
                             workspace_group_hab_sample)
        if workspace_group_lab_can.size() > 0:
            self.setProperty("OutputWorkspaceLABCan", workspace_group_lab_can)
        if workspace_group_lab_sample.size() > 0:
            self.setProperty("OutputWorkspaceLABSample",
                             workspace_group_lab_sample)
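
    # Illustrative sketch (not part of the original algorithm): the if/elif
    # dispatch above can also be written as a lookup table keyed on reduction
    # mode. The helper name is hypothetical; an unknown mode raises KeyError
    # here rather than the RuntimeError used above.
    @staticmethod
    def _group_by_reduction_mode_sketch(output_bundles):
        groups = {ISISReductionMode.LAB: WorkspaceGroup(),
                  ISISReductionMode.HAB: WorkspaceGroup()}
        for bundle in output_bundles:
            if bundle.output_workspace is not None:
                groups[bundle.reduction_mode].addWorkspace(bundle.output_workspace)
        return groups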

    def set_reduced_can_count_and_norm_on_output(self, output_bundles_parts):
        """
        Sets the reduced can count and norm group workspaces on the output properties.
        The reduced can workspaces can be:
        1. LAB Can Count
        2. LAB Can Norm
        3. HAB Can Count
        4. HAB Can Norm

        :param output_bundles_parts: a list of output bundle parts
        """
        workspace_group_lab_can_count = WorkspaceGroup()
        workspace_group_lab_can_norm = WorkspaceGroup()
        workspace_group_hab_can_count = WorkspaceGroup()
        workspace_group_hab_can_norm = WorkspaceGroup()
        # Find the partial output bundles for LAB Can and HAB Can, if they exist
        for event_slice_bundles in output_bundles_parts:
            for output_bundle_part in event_slice_bundles:
                if output_bundle_part.data_type is DataType.Can:
                    reduction_mode = output_bundle_part.reduction_mode
                    output_workspace_count = output_bundle_part.output_workspace_count
                    output_workspace_norm = output_bundle_part.output_workspace_norm
                    # Make sure that the output workspaces are not None, which can
                    # happen if no can was ever set for the reduction.
                    if output_workspace_norm is not None and output_workspace_count is not None and \
                            not does_can_workspace_exist_on_ads(output_workspace_norm) and \
                            not does_can_workspace_exist_on_ads(output_workspace_count):
                        name = self._get_output_workspace_name(
                            output_bundle_part.state,
                            output_bundle_part.reduction_mode)
                        count_name = name + "_hab_can_count"
                        norm_name = name + "_hab_can_norm"
                        AnalysisDataService.addOrReplace(
                            count_name, output_workspace_count)
                        AnalysisDataService.addOrReplace(
                            norm_name, output_workspace_norm)
                        if reduction_mode is ISISReductionMode.LAB:
                            workspace_group_lab_can_count.addWorkspace(
                                output_workspace_count)
                            workspace_group_lab_can_norm.addWorkspace(
                                output_workspace_norm)
                        elif reduction_mode is ISISReductionMode.HAB:
                            workspace_group_hab_can_count.addWorkspace(
                                output_workspace_count)
                            workspace_group_hab_can_norm.addWorkspace(
                                output_workspace_norm)
                        else:
                            raise RuntimeError(
                                "SANSSingleReduction: The reduction mode {0} should not"
                                " be set with a partial can.".format(
                                    reduction_mode))
        if workspace_group_lab_can_count.size() > 0:
            self.setProperty("OutputWorkspaceLABCanCount",
                             workspace_group_lab_can_count)
        if workspace_group_lab_can_norm.size() > 0:
            self.setProperty("OutputWorkspaceLABCanNorm",
                             workspace_group_lab_can_norm)
        if workspace_group_hab_can_count.size() > 0:
            self.setProperty("OutputWorkspaceHABCanCount",
                             workspace_group_hab_can_count)
        if workspace_group_hab_can_norm.size() > 0:
            self.setProperty("OutputWorkspaceHABCanNorm",
                             workspace_group_hab_can_norm)
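
Both setters above skip re-publishing a can workspace that is already cached on the
AnalysisDataService (via does_can_workspace_exist_on_ads). A minimal sketch of such a
guard, assuming a purely name-based check (the real helper may also compare the cached
workspace against the candidate):

from mantid.api import AnalysisDataService

def publish_if_absent(name, workspace):
    # Register the workspace on the ADS only if nothing is stored under that name yet.
    if not AnalysisDataService.doesExist(name):
        AnalysisDataService.addOrReplace(name, workspace)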
Exemple #30
0
class TestCorelliDatabase(unittest.TestCase):

    test_dir = tempfile.TemporaryDirectory('_data_corelli')

    @classmethod
    def setUpClass(cls) -> None:
        r"""
        Load the test cases for calibrate_bank, each consisting of data for only one bank.
        In CORELLI_124023_bank10, tube 13 has shadows at pixel numbers quite different from the rest.
        """
        config.appendDataSearchSubDir('CORELLI/calibration')
        for directory in config.getDataSearchDirs():
            if 'UnitTest' in directory:
                data_dir = path.join(directory, 'CORELLI', 'calibration')
                break
        cls.workspaces_temporary = list()
        cls.cases = dict()
        for bank_case in ('124016_bank10', '123454_bank58', '124023_bank10', '124023_banks_14_15'):
            workspace = 'CORELLI_' + bank_case
            LoadNexusProcessed(Filename=path.join(data_dir, workspace + '.nxs'), OutputWorkspace=workspace)
            cls.cases[bank_case] = workspace
            cls.workspaces_temporary.append(workspace)

    def setUp(self) -> None:
        # create a mock database; exercises save_bank_table, load_bank_table and save_manifest
        self.database_path: str = TestCorelliDatabase.test_dir.name
        date: str = datetime.now().strftime('%Y%m%d')  # format YYYYMMDD

        calibrated_ws = init_corelli_table()
        calibrated_ws.addRow([28672, -1.2497636826045173])
        calibrated_ws.addRow([28673, -1.2462425728938251])
        calibrated_ws.addRow([28674, -1.2427213977528369])
        calibrated_ws.addRow([28675, -1.2392001571797284])
        save_bank_table(calibrated_ws, 10, self.database_path, date)

        calibrated_ws = init_corelli_table()
        calibrated_ws.addRow([28676, -1.2597636826045173])
        calibrated_ws.addRow([28677, -1.2562425728938251])
        calibrated_ws.addRow([28678, -1.2527213977528369])
        calibrated_ws.addRow([28679, -1.2492001571797284])
        save_bank_table(calibrated_ws, 20, self.database_path, date)

        calibrated_ws = init_corelli_table()
        calibrated_ws.addRow([28700, -1.1511478720770645])
        calibrated_ws.addRow([28701, -1.1476249296284657])
        calibrated_ws.addRow([28702, -1.2427213977528369])
        calibrated_ws.addRow([28703, -1.2392001571797284])
        save_bank_table(calibrated_ws, 30, self.database_path, date)

        calibrated_ws = init_corelli_table()
        calibrated_ws.addRow([28704, -1.1611478720770645])
        calibrated_ws.addRow([28705, -1.1776249296284657])
        calibrated_ws.addRow([28706, -1.2827213977528369])
        calibrated_ws.addRow([28707, -1.2992001571797284])
        save_bank_table(calibrated_ws, 40, self.database_path, '20200601')  # use different date

        calibrated_ws = init_corelli_table('calibration_' + str(40))
        calibrated_ws.addRow([28704, -1.1711478720770645])
        calibrated_ws.addRow([28705, -1.1876249296284657])
        calibrated_ws.addRow([28706, -1.2927213977528369])
        calibrated_ws.addRow([28707, -1.3092001571797284])
        save_bank_table(calibrated_ws, 40, self.database_path, '20200101')  # use different date

        # placeholder to read from the database ('date' above already holds today's YYYYMMDD)
        self.ws_group = WorkspaceGroup()
        self.ws_group.addWorkspace(load_bank_table(10, self.database_path, date))
        self.ws_group.addWorkspace(load_bank_table(20, self.database_path, date))
        self.ws_group.addWorkspace(load_bank_table(30, self.database_path, date))
        self.ws_group.addWorkspace(load_bank_table(40, self.database_path, '20200601'))

    def test_init_corelli_table(self):
        corelli_table = init_corelli_table()
        assert isinstance(corelli_table, TableWorkspace)

    def test_has_valid_columns(self):

        corelli_table = init_corelli_table()
        self.assertEqual(has_valid_columns(corelli_table), True)

        table_incomplete: TableWorkspace = CreateEmptyTableWorkspace()
        table_incomplete.addColumn(type="int", name="Detector ID")
        self.assertEqual(has_valid_columns(table_incomplete), False)
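
    # The tests above (together with the rows added in setUp) pin down the
    # schema that init_corelli_table must produce: an int 'Detector ID' column
    # plus a double 'Detector Y Coordinate' column on an otherwise empty
    # TableWorkspace. A hypothetical re-creation for illustration, not the
    # real implementation:
    @staticmethod
    def _sketch_corelli_table():
        table = CreateEmptyTableWorkspace()
        table.addColumn(type='int', name='Detector ID')
        table.addColumn(type='double', name='Detector Y Coordinate')
        return table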

    def test_filename_bank_table(self):

        abs_subdir: str = self.database_path + '/bank001/'
        date: str = datetime.now().strftime('%Y%m%d')  # format YYYYMMDD

        # calibration
        expected_filename = str(pathlib.Path(abs_subdir + '/calibration_corelli_bank001_' + date + '.nxs.h5').resolve())
        filename = filename_bank_table(1, self.database_path, date, 'calibration')
        self.assertEqual(filename, expected_filename)

        # mask
        expected_filename = str(pathlib.Path(abs_subdir + '/mask_corelli_bank001_' + date + '.nxs.h5').resolve())
        filename = filename_bank_table(1, self.database_path, date, 'mask')
        self.assertEqual(filename, expected_filename)

        # fit
        expected_filename = str(pathlib.Path(abs_subdir + '/fit_corelli_bank001_' + date + '.nxs.h5').resolve())
        filename = filename_bank_table(1, self.database_path, date, 'fit')
        self.assertEqual(filename, expected_filename)

        # verify assertion is raised for invalid name
        with self.assertRaises(AssertionError) as ar:
            filename_bank_table(1, self.database_path, date, 'wrong')

        self.assertEqual('wrong is not a valid table type' in str(ar.exception), True)
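
    # The assertions above fix the on-disk naming convention: bank number
    # zero-padded to three digits, table type as prefix, date as suffix. A
    # hypothetical re-implementation for illustration:
    @staticmethod
    def _sketch_filename(database_path, bank_id, date, table_type):
        assert table_type in ('calibration', 'mask', 'fit'), f'{table_type} is not a valid table type'
        subdir = pathlib.Path(database_path) / f'bank{bank_id:03d}'
        return str((subdir / f'{table_type}_corelli_bank{bank_id:03d}_{date}.nxs.h5').resolve())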

    def test_combine_spatial_banks(self):

        # test with name
        combined_table = combine_spatial_banks(self.ws_group, name='calibrated_banks')
        self.assertTrue(combined_table.getName() == 'calibrated_banks')

        # test without name
        combined_table = combine_spatial_banks(self.ws_group)
        combined_dict = combined_table.toDict()

        expected_dict = {'Detector ID': [28672, 28673, 28674, 28675, 28676, 28677, 28678, 28679,
                                         28700, 28701, 28702, 28703, 28704, 28705, 28706, 28707],
                         'Detector Y Coordinate': [-1.24976368, -1.24624257, -1.2427214, -1.23920016, -1.25976368,
                                                   -1.25624257, -1.25272140, -1.24920016, -1.15114787, -1.14762493,
                                                   -1.2427214, -1.23920016, -1.16114787, -1.17762493, -1.28272140,
                                                   -1.29920016]
                         }

        self.assertEqual(expected_dict['Detector ID'], combined_dict['Detector ID'])

        for i, expected_array in enumerate(expected_dict['Detector Y Coordinate']):
            self.assertAlmostEqual(expected_array, combined_dict['Detector Y Coordinate'][i])

    def test_save_manifest_file(self):

        date: str = datetime.now().strftime('%Y%m%d')  # format YYYYMMDD
        filename = self.database_path + '/manifest_corelli_' + date + '.csv'

        # writing
        save_manifest_file(self.database_path, [10, 11], [date, date])
        self.assertTrue(pathlib.Path(filename).is_file())

        file_contents = pathlib.Path(filename).read_text()  # safe one liner
        expected_manifest = f'bankID, timestamp\n10, {date}\n11, {date}\n'
        self.assertEqual(file_contents, expected_manifest)

        remove(filename)
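
    # The expected contents above imply a plain two-column CSV with a header
    # row. A minimal sketch of a writer producing that format (helper name and
    # signature are assumptions):
    @staticmethod
    def _sketch_write_manifest(database_path, bank_ids, timestamps):
        lines = ['bankID, timestamp'] + [f'{b}, {t}' for b, t in zip(bank_ids, timestamps)]
        target = pathlib.Path(database_path) / f'manifest_corelli_{timestamps[0]}.csv'
        target.write_text('\n'.join(lines) + '\n')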

    def test_day_stamp(self) -> None:
        self.assertEqual(day_stamp(self.cases['124016_bank10']), 20200106)
        self.assertEqual(day_stamp(self.cases['124023_bank10']), 20200109)
        self.assertEqual(day_stamp(self.cases['123454_bank58']), 20200103)
        self.assertEqual(day_stamp(self.cases['124023_banks_14_15']), 20200109)
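
    # day_stamp appears to reduce the workspace's 'run_start' log to an integer
    # YYYYMMDD (set_daystamp in test_load_calibration_set below writes exactly
    # that log). A sketch under that assumption, taking a workspace handle:
    @staticmethod
    def _sketch_day_stamp(input_workspace):
        run_start = input_workspace.getRun().getProperty('run_start').value
        return int(run_start[:10].replace('-', ''))  # '2020-01-09T...' -> 20200109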

    def test_save_calibration_set(self) -> None:
        calibrations, masks = calibrate_banks(self.cases['124023_banks_14_15'], '14-15')
        for w in ('calibrations', 'masks', 'fits'):
            assert AnalysisDataService.doesExist(w)

        # Save everything (typical case)
        database = tempfile.TemporaryDirectory()
        save_calibration_set(self.cases['124023_banks_14_15'], database.name, 'calibrations', 'masks', 'fits')
        for bn in ('014', '015'):  # bank number
            for ct in ('calibration', 'mask', 'fit'):  # table type
                assert path.exists(path.join(database.name, f'bank{bn}', f'{ct}_corelli_bank{bn}_20200109.nxs.h5'))
        database.cleanup()

        #  Save only the calibration tables
        database = tempfile.TemporaryDirectory()
        save_calibration_set(self.cases['124023_banks_14_15'], database.name, 'calibrations')
        for bn in ('014', '015'):  # bank number
            assert path.exists(path.join(database.name, f'bank{bn}', f'calibration_corelli_bank{bn}_20200109.nxs.h5'))
        database.cleanup()

        #  Save only the calibration tables as a list of strings
        database = tempfile.TemporaryDirectory()
        save_calibration_set(self.cases['124023_banks_14_15'], database.name, ['calib14', 'calib15'])
        for bn in ('014', '015'):  # bank number
            assert path.exists(path.join(database.name, f'bank{bn}', f'calibration_corelli_bank{bn}_20200109.nxs.h5'))
        database.cleanup()

        #  Save only the calibration tables as a list of workspaces
        database = tempfile.TemporaryDirectory()
        save_calibration_set(self.cases['124023_banks_14_15'], database.name, [mtd['calib14'], mtd['calib15']])
        for bn in ('014', '015'):  # bank number
            assert path.exists(path.join(database.name, f'bank{bn}', f'calibration_corelli_bank{bn}_20200109.nxs.h5'))
        database.cleanup()

        # Save only one table of each type, passing strings
        database = tempfile.TemporaryDirectory()
        save_calibration_set(self.cases['124023_banks_14_15'], database.name, 'calib14', 'mask14', 'fit14')
        for ct in ('calibration', 'mask', 'fit'):  # table type
            assert path.exists(path.join(database.name, 'bank014', f'{ct}_corelli_bank014_20200109.nxs.h5'))
        database.cleanup()

        # Save only one table of each type, passing workspaces
        database = tempfile.TemporaryDirectory()
        save_calibration_set(self.cases['124023_banks_14_15'], database.name,
                             mtd['calib14'], mtd['mask14'], mtd['fit14'])
        for ct in ('calibration', 'mask', 'fit'):  # table type
            assert path.exists(path.join(database.name, 'bank014', f'{ct}_corelli_bank014_20200109.nxs.h5'))
        database.cleanup()

    def test_verify_date_format(self) -> None:

        # success
        date: str = datetime.now().strftime('%Y%m%d')  # format YYYYMMDD
        verify_date_format('test_verify_date_format', date)

        # failure
        # verify assertion is raised for invalid date format
        with self.assertRaises(ValueError) as ar:
            verify_date_format('test_verify_date_format', '120711')

        self.assertEqual('date in function test_verify_date_format' in str(ar.exception), True)

        with self.assertRaises(ValueError) as ar:
            verify_date_format('test_verify_date_format', 'XX220101')

        self.assertEqual('date in function test_verify_date_format' in str(ar.exception), True)
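
    # A sketch of date validation consistent with the failures asserted above;
    # the real implementation may differ, but its error message must contain
    # 'date in function <name>':
    @staticmethod
    def _sketch_verify_date_format(function_name, date):
        if len(date) != 8 or not date.isdigit():
            raise ValueError(f'date in function {function_name} must be an 8-digit YYYYMMDD string')
        datetime.strptime(date, '%Y%m%d')  # also rejects impossible dates such as 20200230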

    def test_combine_temporal_banks(self) -> None:

        date: str = datetime.now().strftime('%Y%m%d')  # format YYYYMMDD
        group_ws, bank_stamps = combine_temporal_banks(self.database_path, date)
        self.assertEqual([bs[0] for bs in bank_stamps], [10, 20, 30, 40])  # bank numbers
        self.assertEqual([bs[1] for bs in bank_stamps], [int(date), int(date), int(date), 20200601])  # day-stamps

        # list of expected group_ws components as dictionaries
        expected = []
        expected.append({'Detector ID': [28672, 28673, 28674, 28675],
                         'Detector Y Coordinate': [-1.24976368, -1.24624257, -1.24272140, -1.23920016]})

        expected.append({'Detector ID': [28676, 28677, 28678, 28679],
                         'Detector Y Coordinate': [-1.25976368, -1.25624257, -1.25272140, -1.24920016]})

        expected.append({'Detector ID': [28700, 28701, 28702, 28703],
                         'Detector Y Coordinate': [-1.15114787, -1.14762493, -1.24272140, -1.23920016]})
        # bank 40 from '20200601'
        expected.append({'Detector ID': [28704, 28705, 28706, 28707],
                         'Detector Y Coordinate': [-1.16114787, -1.17762493, -1.28272140, -1.29920016]})

        for b, ws in enumerate(group_ws):

            table_dict = ws.toDict()
            self.assertEqual(expected[b]['Detector ID'], table_dict['Detector ID'])

            for i, expected_array in enumerate(expected[b]['Detector Y Coordinate']):
                self.assertAlmostEqual(expected_array, table_dict['Detector Y Coordinate'][i])

    def test_new_corelli_calibration_and_load_calibration(self):
        r"""Creating a database is time consuming, thus we test both new_corelli_calibration and load_calibration"""
        # populate a calibration database with a few cases. There should be at least one bank with two calibrations
        database = tempfile.TemporaryDirectory()
        cases = [('124016_bank10', '10'), ('124023_bank10', '10'), ('124023_banks_14_15', '14-15')]
        for bank_case, bank_selection in cases:
            # Produce workspace groups 'calibrations', 'masks', 'fits'
            calibrate_banks(self.cases[bank_case], bank_selection)
            masks = 'masks' if AnalysisDataService.doesExist('masks') else None
            save_calibration_set(self.cases[bank_case], database.name, 'calibrations', masks, 'fits')
            DeleteWorkspaces(['calibrations', 'fits'])
            if AnalysisDataService.doesExist('masks'):
                DeleteWorkspaces(['masks'])

        # invoke creation of a new corelli calibration without a date
        calibration_file, mask_file, manifest_file = new_corelli_calibration(database.name)
        for file_path in (calibration_file, mask_file, manifest_file):
            assert path.exists(file_path)
        assert open(manifest_file).read() == 'bankID, timestamp\n10, 20200109\n14, 20200109\n15, 20200109\n'

        # load latest calibration and mask (day-stamp of '124023_bank10' is 20200109)
        calibration, mask = load_calibration_set(self.cases['124023_bank10'], database.name,
                                                 mask_format='TableWorkspace')
        calibration_expected = LoadNexusProcessed(Filename=calibration_file)
        mask_expected = LoadNexusProcessed(Filename=mask_file)
        assert_allclose(calibration.column(1), calibration_expected.column(1), atol=1e-4)
        assert mask.column(0) == mask_expected.column(0)

        # invoke a new corelli calibration with a date falling in between the two
        # calibrations that bank10 has in our small dataset
        calibration_file, mask_file, manifest_file = new_corelli_calibration(database.name, date='20200108')
        for file_path in (calibration_file, mask_file, manifest_file):
            assert path.exists(file_path)
        assert open(manifest_file).read() == 'bankID, timestamp\n10, 20200106\n'

        # load oldest calibration and mask (day-stamp of '124016_bank10' is 20200106)
        calibration, mask = load_calibration_set(self.cases['124016_bank10'], database.name,
                                                 mask_format='TableWorkspace')
        calibration_expected = LoadNexusProcessed(Filename=calibration_file)
        mask_expected = LoadNexusProcessed(Filename=mask_file)
        assert_allclose(calibration.column(1), calibration_expected.column(1), atol=1e-4)
        assert mask.column(0) == mask_expected.column(0)

        database.cleanup()

    def test_table_to_workspace(self) -> None:
        r"""Test the conversion of a TableWorkspace containing the masked detector ID's to a MaskWorkspace object"""
        output_workspace = 'test_table_to_workspace_masked'
        # Have a fake mask table, masking bank 42
        mask_table = CreateEmptyTableWorkspace(OutputWorkspace=output_workspace)
        mask_table.addColumn(type='int', name='Detector ID')
        begin, end = 167936, 172030  # Bank 42 has detector IDs from 167936 to 172030
        for detector_id in range(begin, 1 + end):
            mask_table.addRow([detector_id])
        # Convert to MaskWorkspace
        mask_table = _table_to_workspace(mask_table)
        # Check the output workspace is of type MaskWorkspace
        assert isinstance(mask_table, MaskWorkspace)
        # Check the output workspace has 1 on workspace indexes for bank 42, and 0 elsewhere
        mask_flags = mask_table.extractY().flatten()
        offset = 3  # due to the detector monitors, workspace_index = detector_id + offset
        masked_workspace_indexes = slice(begin + offset, 1 + end + offset)
        assert np.all(mask_flags[masked_workspace_indexes])  # all values are 1
        mask_flags = np.delete(mask_flags, masked_workspace_indexes)
        assert not np.any(mask_flags)  # no value is 1
        DeleteWorkspace(output_workspace)
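
    # The index arithmetic above (detector_id plus an offset of 3 for the
    # monitors) determines which workspace indexes should read 1 in the mask.
    # A numpy-only sketch of the expected flag vector (hypothetical helper):
    @staticmethod
    def _sketch_expected_mask_flags(num_spectra, begin=167936, end=172030, offset=3):
        flags = np.zeros(num_spectra)
        flags[begin + offset:end + 1 + offset] = 1.0  # masked detectors read as 1
        return flags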

    def test_load_calibration_set(self) -> None:
        r"""
        1. create an empty "database"
          1.1. create a workspace with a particular daystamp
          1.2. try to find a file in the database
        2. create a database with only one calibration file with daystamp 20200601
          2.1 create a workspace with the following daystamps and see in which cases the calibration file is loaded
              20200101, 20200601, 20201201
        3. create a database with two calibration files with day-stamps 20200401 and 20200801
          3.1 create a workspace with the following day-stamps and see which (if any) calibration is selected
             20200101, 20200401, 20200601, 20200801, 20201201
        """

        @contextmanager
        def mock_database(day_stamps: List[int]):
            r"""create a database with mock calibration files"""
            dir_path = tempfile.mkdtemp()
            path = pathlib.Path(dir_path)
            for daystamp in day_stamps:
                file_path = path / f'calibration_corelli_{daystamp}.nxs.h5'
                with open(str(file_path), 'w') as fp:
                    fp.write('mock')
            try:
                yield dir_path
            finally:
                shutil.rmtree(dir_path)

        def set_daystamp(input_workspace: str, daystamp: int):
            r"""Update the run_start log entry of a workspace
            :param input_workspace: handle to a workspace (not its name!)
            :param daystamp: 8-digit date stamp, YYYYMMDD, as int or str
            """
            x = str(daystamp)
            run_start = f'{x[0:4]}-{x[4:6]}-{x[6:]}T20:54:07.265105667'
            run = input_workspace.getRun()
            run.addProperty(name='run_start', value=run_start, replace=True)

        workspace = CreateSampleWorkspace(OutputWorkspace='test_load_calibration_set')
        set_daystamp(workspace, 20200101)

        # empty calibration database (corner case)
        with mock_database([]) as database_path:
            instrument_tables = load_calibration_set(workspace, database_path)
            assert list(instrument_tables) == [None, None]

        # database with only one calibration file (corner case)
        with mock_database([20200601]) as database_path:
            set_daystamp(workspace, 20200101)  # no calibration found
            assert list(load_calibration_set(workspace, database_path)) == [None, None]

            set_daystamp(workspace, 20200601)
            with self.assertRaises(RuntimeError) as ar:
                load_calibration_set(workspace, database_path)  # should pick calibration 20200601
            self.assertEqual('20200601' in str(ar.exception), True)

            set_daystamp(workspace, 20201201)
            with self.assertRaises(RuntimeError) as ar:
                load_calibration_set(workspace, database_path)
            self.assertEqual('calibration_corelli_20200601.nxs.h5' in str(ar.exception), True)

        # database with two calibration files (general case)
        with mock_database([20200401, 20200801]) as database_path:
            set_daystamp(workspace, '20200101')
            assert list(load_calibration_set(workspace, database_path)) == [None, None]

            set_daystamp(workspace, '20200401')
            with self.assertRaises(RuntimeError) as ar:
                load_calibration_set(workspace, database_path)
            self.assertEqual('calibration_corelli_20200401.nxs.h5' in str(ar.exception), True)

            set_daystamp(workspace, '20200601')
            with self.assertRaises(RuntimeError) as ar:
                load_calibration_set(workspace, database_path)
            self.assertEqual('calibration_corelli_20200401.nxs.h5' in str(ar.exception), True)

            set_daystamp(workspace, '20200801')
            with self.assertRaises(RuntimeError) as ar:
                load_calibration_set(workspace, database_path)
            self.assertEqual('calibration_corelli_20200801.nxs.h5' in str(ar.exception), True)

            set_daystamp(workspace, '20201201')
            with self.assertRaises(RuntimeError) as ar:
                load_calibration_set(workspace, database_path)
            self.assertEqual('calibration_corelli_20200801.nxs.h5' in str(ar.exception), True)
        workspace.delete()

    def tearDown(self) -> None:
        date: str = datetime.now().strftime('%Y%m%d')  # format YYYYMMDD
        remove(filename_bank_table(10, self.database_path, date))
        remove(filename_bank_table(20, self.database_path, date))
        remove(filename_bank_table(30, self.database_path, date))
        remove(filename_bank_table(40, self.database_path, '20200601'))
        remove(filename_bank_table(40, self.database_path, '20200101'))
        TestCorelliDatabase.test_dir.cleanup()

    @classmethod
    def tearDownClass(cls) -> None:
        r"""Delete temporary workspaces"""
        if len(cls.workspaces_temporary) > 0:
            DeleteWorkspaces(cls.workspaces_temporary)

pd_out2 = WANDPowderReduction(
    InputWorkspace=[event_data, event_data],
    CalibrationWorkspace=event_cal,
    BackgroundWorkspace=event_bkg,
    Target="Theta",
    NumberBins=1000,
    NormaliseBy="None",
    Sum=True,
)

event_data2 = CloneWorkspace(event_data)

event_data_group2 = WorkspaceGroup()
event_data_group2.addWorkspace(event_data)
event_data_group2.addWorkspace(event_data2)

pd_out = WANDPowderReduction(
    InputWorkspace=event_data_group2,
    CalibrationWorkspace=event_cal,
    BackgroundWorkspace=event_bkg,
    Target="Theta",
    NumberBins=1000,
    NormaliseBy="None",
    Sum=False,
)
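
# Quick sanity check on the manually built group (a hedged addition; size() and
# getNames() are standard WorkspaceGroup methods): both workspaces should be members.
assert event_data_group2.size() == 2
assert event_data2.name() in event_data_group2.getNames()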

event_data_group = GroupWorkspaces([event_data, event_data2])
pd_out = WANDPowderReduction(
    InputWorkspace=event_data_group,