def on_save_state(self):
    """Serialize the state of the currently selected row to a JSON file.

    The target path comes from the view; the extension is normalized to the
    JSON suffix and the view is updated with the final file name.
    """
    UsageService.registerFeatureUsage(
        FeatureType.Feature, ["ISIS SANS", "Settings Diagnostics - Save JSON"], False)
    # Get the save location
    save_location = self._view.get_save_location()
    # Check if it exists
    path_dir = os.path.dirname(save_location)
    if not path_dir:
        # Fixed grammar of the user-facing warning ("a validate path" -> "a valid path")
        self.gui_logger.warning(
            "The provided save location for the SANS state does not seem to exist. "
            "Please provide a valid path")
        return

    # Force the JSON suffix regardless of what extension the user typed
    file_name, _ = os.path.splitext(save_location)
    full_file_path = file_name + JSON_SUFFIX

    row_index = self._view.get_current_row()
    state = self.get_state(row_index)

    Serializer.save_file(state, full_file_path)
    self.gui_logger.information(
        "The state for row {} has been saved to: {} ".format(row_index, full_file_path))

    # Update the file name in the UI
    self._view.set_save_location(full_file_path)
def test_that_angle_masking_is_applied(self):
    """Phi-angle masking should leave only the expected quadrant unmasked."""
    # Arrange
    file_information_factory = SANSFileInformationFactory()
    file_information = file_information_factory.create_sans_file_information("SANS2D00028827")

    data_builder = get_data_builder(SANSFacility.ISIS, file_information)
    data_builder.set_sample_scatter("SANS2D00028827")
    data_info = data_builder.build()
    mask_builder = get_mask_builder(data_info)

    phi_mirror = False
    phi_min = 0.
    phi_max = 90.

    # Everything should be masked except the upper right quadrant:
    # | 120 |-------------------|
    # |     |---------------------|
    # |  60 |-------------------|
    # |     |----------------------|
    # |
    # |
    # |-------------------|------------------|
    # 512                256                  0
    expected_spectra = []
    # The alternating row widths stem from the offset of the SANS2D tube geometry
    # (see InstrumentView).
    for tube_row in range(60, 120):
        row_start = tube_row * 512 + 9
        row_width = 255 if tube_row % 2 == 0 else 257
        expected_spectra.extend(row_start + offset for offset in range(row_width))
    expected_spectra.extend(range(92169, 122889))  # HAB

    mask_builder.set_use_mask_phi_mirror(phi_mirror)
    mask_builder.set_phi_min(phi_min)
    mask_builder.set_phi_max(phi_max)
    mask_info = mask_builder.build()

    test_director = TestDirector()
    test_director.set_states(data_state=data_info, mask_state=mask_info)
    state = test_director.construct()

    returned_data = SANSLoad(SANSState=Serializer.to_json(state),
                             SampleScatterWorkspace="mask_sans_ws",
                             SampleScatterMonitorWorkspace="dummy")
    workspace = returned_data[0]
    DeleteWorkspace(returned_data[1])

    # Act
    workspace = mask_workspace(state=state, component_as_string="LAB", workspace=workspace)

    # Assert
    self._do_assert_non_masked(workspace, expected_spectra)
def set_properties_for_beam_centre_algorithm(beam_centre_alg, reduction_package,
                                             workspace_to_name, workspace_to_monitor):
    """
    Sets up everything necessary on the beam centre algorithm.
    :param beam_centre_alg: a handle to the beam centre algorithm
    :param reduction_package: a reduction package object
    :param workspace_to_name: the workspace to name map
    :param workspace_to_monitor: a workspace to monitor map
    """
    # Set the SANSState
    state_json = Serializer.to_json(reduction_package.state)
    beam_centre_alg.setProperty("SANSState", state_json)

    # Set the input workspaces. Iterating the dict views directly is fine since
    # nothing is mutated, so the previous list(...) copies were unnecessary.
    for workspace_type, workspace in reduction_package.workspaces.items():
        if workspace is not None:
            beam_centre_alg.setProperty(workspace_to_name[workspace_type], workspace)

    # Set the monitors
    for workspace_type, monitor in reduction_package.monitors.items():
        if monitor is not None:
            beam_centre_alg.setProperty(workspace_to_monitor[workspace_type], monitor)
def _load_workspace(self, state):
    """Run SANSLoad as an unmanaged child algorithm and return the loaded workspaces.

    Returns a 4-tuple: (scatter, scatter monitor, transmission-or-None, direct-or-None).
    """
    load_alg = AlgorithmManager.createUnmanaged("SANSLoad")
    load_alg.setChild(True)
    load_alg.initialize()

    load_alg.setProperty("SANSState", Serializer.to_json(state))
    load_alg.setProperty("PublishToCache", False)
    load_alg.setProperty("UseCached", False)
    load_alg.setProperty("SampleScatterWorkspace", EMPTY_NAME)
    load_alg.setProperty("SampleScatterMonitorWorkspace", EMPTY_NAME)

    # The optional outputs are only requested when the state says they will be loaded.
    has_transmission = state.data.sample_transmission
    if has_transmission:
        load_alg.setProperty("SampleTransmissionWorkspace", EMPTY_NAME)
    has_direct = state.data.sample_direct
    if has_direct:
        load_alg.setProperty("SampleDirectWorkspace", EMPTY_NAME)

    # Act
    load_alg.execute()
    self.assertTrue(load_alg.isExecuted())

    scatter = load_alg.getProperty("SampleScatterWorkspace").value
    scatter_monitor = load_alg.getProperty("SampleScatterMonitorWorkspace").value
    transmission = load_alg.getProperty("SampleTransmissionWorkspace").value if has_transmission else None
    direct = load_alg.getProperty("SampleDirectWorkspace").value if has_direct else None

    return scatter, scatter_monitor, transmission, direct
def _run_reduction_core(self, state, workspace, monitor, transmission=None, direct=None,
                        detector_type=DetectorType.LAB, component=DataType.SAMPLE):
    """Configure and execute SANSReductionCore; return the executed algorithm handle."""
    core_alg = AlgorithmManager.createUnmanaged("SANSReductionCore")
    core_alg.setChild(True)
    core_alg.initialize()

    # Properties are applied in the same order as before via an insertion-ordered dict.
    properties = {
        "SANSState": Serializer.to_json(state),
        "ScatterWorkspace": workspace,
        "ScatterMonitorWorkspace": monitor,
    }
    if transmission:
        properties["TransmissionWorkspace"] = transmission
    if direct:
        properties["DirectWorkspace"] = direct
    properties.update({
        "Component": detector_type.value,
        "DataType": component.value,
        "OutputWorkspace": EMPTY_NAME,
        "CalculatedTransmissionWorkspace": EMPTY_NAME,
        "UnfittedTransmissionWorkspace": EMPTY_NAME,
    })
    for prop_name, prop_value in properties.items():
        core_alg.setProperty(prop_name, prop_value)

    # Act
    core_alg.execute()
    self.assertTrue(core_alg.isExecuted())
    return core_alg
def _run_load(state, publish_to_cache, use_cached, move_workspace=False,
              beam_coordinates=None, component=None, output_workspace_names=None):
    """Build, configure and execute an unmanaged SANSLoad algorithm; return its handle.

    Note: the algorithm rethrows, so a failing load raises rather than returning
    an unexecuted handle.
    """
    load_alg = AlgorithmManager.createUnmanaged("SANSLoad")
    load_alg.setChild(True)
    load_alg.setRethrows(True)
    load_alg.initialize()

    load_alg.setProperty("SANSState", Serializer.to_json(state))
    load_alg.setProperty("PublishToCache", publish_to_cache)
    load_alg.setProperty("UseCached", use_cached)

    if move_workspace:
        load_alg.setProperty("Component", component)
        load_alg.setProperty("BeamCoordinates", beam_coordinates)

    # Forward any requested output workspace names
    for prop_name, prop_value in (output_workspace_names or {}).items():
        load_alg.setProperty(prop_name, prop_value)

    # Act
    load_alg.execute()
    return load_alg
def _run_single_reduction(self, state, sample_scatter, sample_monitor,
                          sample_transmission=None, sample_direct=None,
                          can_scatter=None, can_monitor=None,
                          can_transmission=None, can_direct=None,
                          output_settings=None, event_slice_optimisation=False,
                          save_can=False, use_optimizations=False):
    """Run SANSSingleReduction (v2 when event-slice optimised, else v1); return the handle."""
    options = {
        "SANSState": Serializer.to_json(state),
        "SampleScatterWorkspace": sample_scatter,
        "SampleScatterMonitorWorkspace": sample_monitor,
        "UseOptimizations": use_optimizations,
        "SaveCan": save_can,
    }
    # Optional workspaces are only added when truthy, matching the original checks.
    optional_workspaces = {
        "SampleTransmissionWorkspace": sample_transmission,
        "SampleDirectWorkspace": sample_direct,
        "CanScatterWorkspace": can_scatter,
        "CanScatterMonitorWorkspace": can_monitor,
        "CanTransmissionWorkspace": can_transmission,
        "CanDirectWorkspace": can_direct,
    }
    options.update({name: ws for name, ws in optional_workspaces.items() if ws})
    if output_settings:
        options.update(output_settings)

    version = 2 if event_slice_optimisation else 1
    single_reduction_alg = create_unmanaged_algorithm(
        "SANSSingleReduction", version=version, **options)

    # Act
    single_reduction_alg.execute()
    self.assertTrue(single_reduction_alg.isExecuted())
    return single_reduction_alg
def display_state_diagnostic_tree(self, state):
    """Show ``state`` in the view's tree widget as a plain dict, or clear it when falsy."""
    tree_values = None
    if state:
        # Round-trip through JSON so the view receives a plain dict rather than
        # a state object (the serializer is deliberately not used for decoding).
        tree_values = json.loads(Serializer.to_json(state))
    self._view.set_tree(tree_values)
def _load_workspace(self, state, workspace_name):
    """Load the scatter sample described by ``state``, move it, and park it hidden in the ADS."""
    self._prepare_to_load_scatter_sample_only(state)
    self._handle_multi_period_data(state)

    loaded_workspace = self._perform_load(Serializer.to_json(state))
    self._perform_move(state, loaded_workspace)
    self._store_in_ads_as_hidden(workspace_name, loaded_workspace)
    return loaded_workspace
def get_state_hash_for_can_reduction(state, reduction_mode, partial_type=None):
    """
    Creates a hash for a (modified) state object.

    The state is copied and stripped of its sample-specific settings so that can
    reductions which otherwise share everything also share a hash. This is the only
    place in the reduction workflow where state values are (locally) altered.
    :param state: a SANSState object.
    :param reduction_mode: the reduction mode; only LAB and HAB are accepted.
    :param partial_type: if it is a partial type, then it needs to be specified here.
    :return: the hash of the state
    """
    def _strip_sample_information(full_state):
        stripped = deepcopy(full_state)
        # Data
        stripped.data.sample_scatter = EMPTY_NAME
        stripped.data.sample_scatter_period = ALL_PERIODS
        stripped.data.sample_transmission = EMPTY_NAME
        stripped.data.sample_transmission_period = ALL_PERIODS
        stripped.data.sample_direct = EMPTY_NAME
        stripped.data.sample_direct_period = ALL_PERIODS
        stripped.data.sample_scatter_run_number = 1
        # Save
        stripped.save.user_specified_output_name = ""
        return stripped

    serialized = Serializer.to_json(_strip_sample_information(state))
    state_string = str(json.dumps(serialized, sort_keys=True, indent=4))

    # Tag the string with the reduction mode
    if reduction_mode is ReductionMode.LAB:
        state_string += "LAB"
    elif reduction_mode is ReductionMode.HAB:
        state_string += "HAB"
    else:
        raise RuntimeError("Only LAB and HAB reduction modes are allowed at this point."
                           " {} was provided".format(reduction_mode))

    # Mark partial output workspaces as such (enum lookup is equivalent to the
    # previous chain of identity comparisons).
    partial_tags = {
        OutputParts.COUNT: "counts",
        OutputParts.NORM: "norm",
        TransmissionType.CALCULATED: "calculated_transmission",
        TransmissionType.UNFITTED: "unfitted_transmission",
    }
    if partial_type in partial_tags:
        state_string += partial_tags[partial_type]

    return str(get_hash_value(state_string))
def validateInputs(self):
    """Check that the SANSState property deserializes and validates.

    :return: dict of property name -> error message; empty when valid.
    """
    errors = dict()
    serialized_state = self.getProperty("SANSState").value
    # The input must be convertible into the right state object.
    try:
        state = Serializer.from_json(serialized_state)
        state.validate()
    except ValueError as err:
        errors["SANSCreateAdjustmentWorkspaces"] = str(err)
    return errors
def test_that_enum_list_can_be_serialized(self):
    """A list of enum members must survive a serialize/deserialize round trip."""
    original_obj = ExampleWrapper()
    original_obj.bar = [FakeEnumClass.BAR, FakeEnumClass.BAR]

    # Serializing test: both attributes appear in the JSON string
    serialized = Serializer.to_json(original_obj)
    self.assertTrue("bar" in serialized)
    self.assertTrue("_foo" in serialized)
    self.assertTrue(isinstance(serialized, str))

    # Deserializing test: push through a fake algorithm property and read back
    fake = JsonSerializerTest.FakeAlgorithm()
    fake.initialize()
    fake.setProperty("Args", serialized)
    round_tripped = Serializer.from_json(fake.getProperty("Args").value)
    self.assertEqual(original_obj.bar, round_tripped.bar)
    self.assertEqual(original_obj._foo, round_tripped._foo)
def test_that_sans_state_can_be_serialized_and_deserialized_when_going_through_an_algorithm(
        self):
    """ComplexState must survive serialization through an algorithm property."""
    # Arrange
    state = ComplexState()

    # Act
    serialized = Serializer.to_json(state)
    fake = JsonSerializerTest.FakeAlgorithm()
    fake.initialize()
    fake.setProperty("Args", serialized)
    property_manager = fake.getProperty("Args").value

    # Assert
    self.assertEqual(type(serialized), str)
    deserialized = Serializer.from_json(property_manager)

    # The direct sub state
    self.assertEqual(state.sub_state_1.float_list_parameter,
                     deserialized.sub_state_1.float_list_parameter)

    # The regular parameters
    self.assertEqual(deserialized.float_parameter, 23.)
    self.assertEqual(deserialized.positive_float_with_none_parameter, 234.)
def PyExec(self):
    """Build all adjustment workspaces from the SANSState and set the algorithm outputs."""
    # Read the state
    state = Serializer.from_json(self.getProperty("SANSState").value)

    # Monitor normalization workspace
    norm_to_monitor = self._get_monitor_normalization_workspace(state)

    # Calculated and unfitted transmission workspaces
    calculated_transmission, unfitted_transmission = \
        self._get_calculated_transmission_workspace(state)

    # Wide angle correction workspace
    wavelength_and_pixel = self._get_wide_angle_correction_workspace(
        state, calculated_transmission)

    # Full wavelength and pixel adjustment
    wavelength_adjustment, pixel_adjustment = \
        self._get_wavelength_and_pixel_adjustment_workspaces(norm_to_monitor,
                                                             calculated_transmission)

    # Optional outputs are only set when they were actually produced.
    if wavelength_adjustment:
        self.setProperty("OutputWorkspaceWavelengthAdjustment", wavelength_adjustment)
    if pixel_adjustment:
        self.setProperty("OutputWorkspacePixelAdjustment", pixel_adjustment)
    if wavelength_and_pixel:
        self.setProperty("OutputWorkspaceWavelengthAndPixelAdjustment", wavelength_and_pixel)

    self.setProperty("CalculatedTransmissionWorkspace", calculated_transmission)
    self.setProperty("UnfittedTransmissionWorkspace", unfitted_transmission)
def _run_beam_centre_core(self, state, workspace, monitor, transmission=None, direct=None,
                          detector_type=DetectorType.LAB, component=DataType.SAMPLE,
                          centre_1=0.1, centre_2=-0.1, r_min=0.06, r_max=0.26):
    """Configure and execute SANSBeamCentreFinderCore; return the executed handle."""
    centre_alg = AlgorithmManager.createUnmanaged("SANSBeamCentreFinderCore")
    centre_alg.setChild(True)
    centre_alg.initialize()

    centre_alg.setProperty("SANSState", Serializer.to_json(state))
    centre_alg.setProperty("ScatterWorkspace", workspace)
    centre_alg.setProperty("ScatterMonitorWorkspace", monitor)

    if transmission:
        centre_alg.setProperty("TransmissionWorkspace", transmission)
    if direct:
        centre_alg.setProperty("DirectWorkspace", direct)

    centre_alg.setProperty("Component", detector_type.value)
    centre_alg.setProperty("DataType", component.value)
    centre_alg.setProperty("Centre1", centre_1)
    centre_alg.setProperty("Centre2", centre_2)
    centre_alg.setProperty("RMax", r_max)
    centre_alg.setProperty("RMin", r_min)

    # The four quadrant outputs all get placeholder names
    for quadrant_output in ("OutputWorkspaceLeft", "OutputWorkspaceRight",
                            "OutputWorkspaceTop", "OutputWorkspaceBottom"):
        centre_alg.setProperty(quadrant_output, EMPTY_NAME)

    # Act
    centre_alg.execute()
    self.assertTrue(centre_alg.isExecuted())
    return centre_alg
def run_initial_event_slice_reduction(reduction_alg, reduction_setting_bundle):
    """
    This function runs the initial core reduction for event slice data. This is essentially
    half a reduction (either sample or can), and is run before event slicing has been performed.

    :param reduction_alg: a handle to the initial event slice reduction algorithm.
    :param reduction_setting_bundle: a ReductionSettingBundle tuple
    :return: a EventSliceReductionSettingBundle tuple
    """
    bundle = reduction_setting_bundle
    # Get component to reduce
    component = get_component_to_reduce(bundle)

    # Configure the reduction algorithm
    reduction_alg.setProperty("SANSState", Serializer.to_json(bundle.state))
    reduction_alg.setProperty("Component", component)
    reduction_alg.setProperty("ScatterWorkspace", bundle.scatter_workspace)
    reduction_alg.setProperty("ScatterMonitorWorkspace", bundle.scatter_monitor_workspace)
    reduction_alg.setProperty("DataType", bundle.data_type.value)
    reduction_alg.setProperty("OutputWorkspace", EMPTY_NAME)
    reduction_alg.setProperty("OutputMonitorWorkspace", EMPTY_NAME)

    # Run the reduction core
    reduction_alg.execute()

    # Collect the results into the next-stage bundle
    return EventSliceSettingBundle(
        state=bundle.state,
        data_type=bundle.data_type,
        reduction_mode=bundle.reduction_mode,
        output_parts=bundle.output_parts,
        scatter_workspace=reduction_alg.getProperty("OutputWorkspace").value,
        dummy_mask_workspace=reduction_alg.getProperty("DummyMaskWorkspace").value,
        scatter_monitor_workspace=reduction_alg.getProperty("OutputMonitorWorkspace").value,
        direct_workspace=bundle.direct_workspace,
        transmission_workspace=bundle.transmission_workspace)
def run_core_reduction(reduction_alg, reduction_setting_bundle):
    """
    This function runs a core reduction. This is essentially half a reduction
    (either sample or can).

    :param reduction_alg: a handle to the reduction algorithm.
    :param reduction_setting_bundle: a ReductionSettingBundle tuple
    :return: an OutputBundle and an OutputPartsBundle
    """
    bundle = reduction_setting_bundle
    # Get component to reduce
    component = get_component_to_reduce(bundle)

    # Properties are applied in the original order via an insertion-ordered dict.
    properties = {
        "SANSState": Serializer.to_json(bundle.state),
        "Component": component,
        "ScatterWorkspace": bundle.scatter_workspace,
        "ScatterMonitorWorkspace": bundle.scatter_monitor_workspace,
        "DataType": bundle.data_type.value,
    }
    if bundle.transmission_workspace is not None:
        properties["TransmissionWorkspace"] = bundle.transmission_workspace
    if bundle.direct_workspace is not None:
        properties["DirectWorkspace"] = bundle.direct_workspace
    properties.update({
        "OutputWorkspaces": EMPTY_NAME,
        "SumOfCounts": EMPTY_NAME,
        "SumOfNormFactors": EMPTY_NAME,
    })
    for prop_name, prop_value in properties.items():
        reduction_alg.setProperty(prop_name, prop_value)

    # Run the reduction core
    reduction_alg.execute()

    # Collect the reduced slices
    return _pack_bundles(reduction_alg, bundle)
def run_core_event_slice_reduction(reduction_alg, reduction_setting_bundle):
    """
    This function runs a core reduction for event slice data. This reduction slices by event
    time and converts to q. All other operations, such as moving and converting to histogram,
    have been performed before the event slicing.

    :param reduction_alg: a handle to the reduction algorithm.
    :param reduction_setting_bundle: a ReductionSettingBundle tuple
    :return: a list of reduced slices
    """
    bundle = reduction_setting_bundle
    # Get component to reduce
    component = get_component_to_reduce(bundle)

    # Apply all properties in the original order
    for prop_name, prop_value in (
            ("SANSState", Serializer.to_json(bundle.state)),
            ("Component", component),
            ("ScatterWorkspace", bundle.scatter_workspace),
            ("DirectWorkspace", bundle.direct_workspace),
            ("TransmissionWorkspace", bundle.transmission_workspace),
            ("DummyMaskWorkspace", bundle.dummy_mask_workspace),
            ("ScatterMonitorWorkspace", bundle.scatter_monitor_workspace),
            ("DataType", bundle.data_type.value),
            ("OutputWorkspaces", EMPTY_NAME),
            ("SumOfCounts", EMPTY_NAME),
            ("SumOfNormFactors", EMPTY_NAME)):
        reduction_alg.setProperty(prop_name, prop_value)

    # Run the reduction core
    reduction_alg.execute()

    # Pull the result out of the workspace
    return _pack_bundles(reduction_alg, bundle)
def PyExec(self):
    """Load all workspaces required by the SANSState, move them into position, set outputs."""
    # Read the state
    state = Serializer.from_json(self.getProperty("SANSState").value)

    # Cache optimization is only applied to the calibration workspace, since it is not
    # available as a return property and is unlikely to change between reductions.
    use_cached = self.getProperty("UseCached").value
    publish_to_ads = self.getProperty("PublishToCache").value

    data = state.data
    state_adjustment = state.adjustment
    progress = self._get_progress_for_file_loading(data, state_adjustment)

    # Get the correct SANSLoader from the SANSLoaderFactory and run it
    loader = SANSLoadDataFactory().create_loader(state)
    workspaces, workspace_monitors = loader.execute(data_info=data,
                                                    use_cached=use_cached,
                                                    publish_to_ads=publish_to_ads,
                                                    progress=progress,
                                                    parent_alg=self,
                                                    adjustment_info=state.adjustment)
    progress.report("Loaded the data.")

    progress_move = Progress(self, start=0.8, end=1.0, nreports=2)
    progress_move.report("Starting to move the workspaces.")
    self._perform_initial_move(workspaces, state)
    progress_move.report("Finished moving the workspaces.")

    # Set output workspaces, then their monitor counterparts
    for workspace_type, workspace in workspaces.items():
        self.set_output_for_workspaces(workspace_type, workspace)
    for workspace_type, workspace in workspace_monitors.items():
        self.set_output_for_monitor_workspaces(workspace_type, workspace)
def test_that_can_save_out_state(self):
    """Saving via the presenter writes a JSON file that round-trips the dummy state."""
    # Arrange
    parent_presenter = create_run_tab_presenter_mock()
    view = create_mock_settings_diagnostic_tab()
    dummy_file_path = os.path.join(tempfile.gettempdir(), "sans_settings_diag_test.json")
    view.get_save_location = mock.MagicMock(return_value=dummy_file_path)
    presenter = SettingsDiagnosticPresenter(parent_presenter)
    presenter.set_view(view)

    # Removed leftover debug print; cleanup now runs even when an assertion fails,
    # so a failing run cannot leak the temp file into subsequent runs.
    try:
        # Act
        presenter.on_save_state()

        # Assert
        self.assertTrue(os.path.exists(dummy_file_path))
        obj = Serializer.load_file(dummy_file_path)
        self.assertEqual("dummy_state", obj.dummy_state)
    finally:
        if os.path.exists(dummy_file_path):
            os.remove(dummy_file_path)
def _get_state(self):
    """Deserialize and return the SANSState held in the ``SANSState`` property."""
    return Serializer.from_json(self.getProperty("SANSState").value)
def validateInputs(self):
    """
    Validate that the SANSState deserializes, and that each output workspace property is
    consistent with what the state says will be loaded.
    :return: a dict mapping property names to error messages (empty when everything is valid)
    """
    errors = dict()
    # Check that the input can be converted into the right state object
    state_json = self.getProperty("SANSState").value
    try:
        state = Serializer.from_json(state_json)
        state.validate()
    except ValueError as err:
        errors.update({"SANSState": str(err)})
        return errors

    # We need to validate that for each expected output workspace of the SANSState an output
    # workspace name was supplied in the PyInit. The two sample scatter outputs are mandatory.
    for prop_name in ("SampleScatterWorkspace", "SampleScatterMonitorWorkspace"):
        value = self.getProperty(prop_name).value
        value_as_string = self.getProperty(prop_name).valueAsStr
        if value is None and not value_as_string:
            errors.update({
                prop_name: "A sample scatter output workspace needs to be specified."
            })

    # ------------------------------------
    # Check the optional output workspaces
    # If they are specified in the SANSState, then we require them to be set on the output
    # as well, and vice versa. The repeated per-workspace stanzas are factored into
    # _validate_optional_output.
    data_info = state.data
    optional_outputs = (
        ("SampleTransmissionWorkspace", data_info.sample_transmission, "sample transmission"),
        ("SampleDirectWorkspace", data_info.sample_direct, "sample direct"),
        ("CanScatterWorkspace", data_info.can_scatter, "can scatter"),
        # The can scatter monitor is deliberately validated against the can scatter
        # entry of the state (there is no separate monitor entry).
        ("CanScatterMonitorWorkspace", data_info.can_scatter, "can scatter monitor"),
        ("CanTransmissionWorkspace", data_info.can_transmission, "can transmission"),
        ("CanDirectWorkspace", data_info.can_direct, "can direct"),
    )
    for prop_name, from_state, label in optional_outputs:
        self._validate_optional_output(errors, prop_name, from_state, label)
    return errors

def _validate_optional_output(self, errors, property_name, from_state, label):
    """Record an error when the output property and the state's loading plan disagree."""
    was_set = self.getProperty(property_name).value is not None or len(
        self.getProperty(property_name).valueAsStr) > 0
    if not was_set and from_state is not None:
        errors.update({
            property_name:
            "You need to set the output for the {} workspace since it is specified to be "
            "loaded in your reduction configuration.".format(label)
        })
    if was_set and from_state is None:
        errors.update({
            property_name:
            "You set an output workspace for {}, although none is specified in the "
            "reduction configuration.".format(label)
        })
def run_core_event_slice_reduction(reduction_alg, reduction_setting_bundle):
    """
    This function runs a core reduction for event slice data. This reduction slices by event
    time and converts to q. All other operations, such as moving and converting to histogram,
    have been performed before the event slicing.

    :param reduction_alg: a handle to the reduction algorithm.
    :param reduction_setting_bundle: a ReductionSettingBundle tuple
    :return: an OutputBundle, an OutputPartsBundle and an OutputTransmissionBundle
    """
    bundle = reduction_setting_bundle
    # Get component to reduce
    component = get_component_to_reduce(bundle)

    # Apply all properties in the original order
    for prop_name, prop_value in (
            ("SANSState", Serializer.to_json(bundle.state)),
            ("Component", component),
            ("ScatterWorkspace", bundle.scatter_workspace),
            ("DirectWorkspace", bundle.direct_workspace),
            ("TransmissionWorkspace", bundle.transmission_workspace),
            ("DummyMaskWorkspace", bundle.dummy_mask_workspace),
            ("ScatterMonitorWorkspace", bundle.scatter_monitor_workspace),
            ("DataType", bundle.data_type.value),
            ("OutputWorkspace", EMPTY_NAME),
            ("SumOfCounts", EMPTY_NAME),
            ("SumOfNormFactors", EMPTY_NAME)):
        reduction_alg.setProperty(prop_name, prop_value)

    # Run the reduction core
    reduction_alg.execute()

    # Pull the results out of the algorithm
    output_bundle = OutputBundle(
        state=bundle.state,
        data_type=bundle.data_type,
        reduction_mode=bundle.reduction_mode,
        output_workspace=reduction_alg.getProperty("OutputWorkspace").value)
    output_parts_bundle = OutputPartsBundle(
        state=bundle.state,
        data_type=bundle.data_type,
        reduction_mode=bundle.reduction_mode,
        output_workspace_count=reduction_alg.getProperty("SumOfCounts").value,
        output_workspace_norm=reduction_alg.getProperty("SumOfNormFactors").value)
    output_transmission_bundle = OutputTransmissionBundle(
        state=bundle.state,
        data_type=bundle.data_type,
        calculated_transmission_workspace=reduction_alg.getProperty(
            "CalculatedTransmissionWorkspace").value,
        unfitted_transmission_workspace=reduction_alg.getProperty(
            "UnfittedTransmissionWorkspace").value,
    )
    return output_bundle, output_parts_bundle, output_transmission_bundle
def run_core_reduction(reduction_alg, reduction_setting_bundle):
    """
    This function runs a core reduction. This is essentially half a reduction
    (either sample or can).

    :param reduction_alg: a handle to the reduction algorithm.
    :param reduction_setting_bundle: a ReductionSettingBundle tuple
    :return: an OutputBundle, an OutputPartsBundle and an OutputTransmissionBundle
    """
    bundle = reduction_setting_bundle
    # Get component to reduce
    component = get_component_to_reduce(bundle)

    # Properties are applied in the original order via an insertion-ordered dict.
    properties = {
        "SANSState": Serializer.to_json(bundle.state),
        "Component": component,
        "ScatterWorkspace": bundle.scatter_workspace,
        "ScatterMonitorWorkspace": bundle.scatter_monitor_workspace,
        "DataType": bundle.data_type.value,
    }
    if bundle.transmission_workspace is not None:
        properties["TransmissionWorkspace"] = bundle.transmission_workspace
    if bundle.direct_workspace is not None:
        properties["DirectWorkspace"] = bundle.direct_workspace
    properties.update({
        "OutputWorkspace": EMPTY_NAME,
        "SumOfCounts": EMPTY_NAME,
        "SumOfNormFactors": EMPTY_NAME,
    })
    for prop_name, prop_value in properties.items():
        reduction_alg.setProperty(prop_name, prop_value)

    # Run the reduction core
    reduction_alg.execute()

    # Pull the results out of the algorithm
    output_bundle = OutputBundle(
        state=bundle.state,
        data_type=bundle.data_type,
        reduction_mode=bundle.reduction_mode,
        output_workspace=reduction_alg.getProperty("OutputWorkspace").value)
    output_parts_bundle = OutputPartsBundle(
        state=bundle.state,
        data_type=bundle.data_type,
        reduction_mode=bundle.reduction_mode,
        output_workspace_count=reduction_alg.getProperty("SumOfCounts").value,
        output_workspace_norm=reduction_alg.getProperty("SumOfNormFactors").value)
    output_transmission_bundle = OutputTransmissionBundle(
        state=bundle.state,
        data_type=bundle.data_type,
        calculated_transmission_workspace=reduction_alg.getProperty(
            "CalculatedTransmissionWorkspace").value,
        unfitted_transmission_workspace=reduction_alg.getProperty(
            "UnfittedTransmissionWorkspace").value)
    return output_bundle, output_parts_bundle, output_transmission_bundle