def PyExec(self):
    """Run a single SANS reduction and register the resulting workspaces as outputs.

    Workflow: build the per-detector reduction bundles from the state object,
    run the core reduction for each bundle (reusing cached can reductions when
    optimizations are enabled), optionally merge the partial results, and set
    the final workspaces on the algorithm's output properties.
    """
    # Get state
    state = self._get_state()

    # Get reduction mode
    overall_reduction_mode = self._get_reduction_mode(state)

    # Decide which core reduction information to run, i.e. HAB, LAB, ALL, MERGED. In the case of ALL and MERGED,
    # the required simple reduction modes need to be run. Normally this is HAB and LAB, future implementations
    # might have more detectors though (or different types)
    reduction_setting_bundles = self._get_reduction_setting_bundles(
        state, overall_reduction_mode)

    # Run core reductions
    use_optimizations = self.getProperty("UseOptimizations").value

    # Create the reduction core algorithm
    reduction_name = "SANSReductionCore"
    reduction_options = {}
    reduction_alg = create_child_algorithm(self, reduction_name,
                                           **reduction_options)

    # Set up progress reporting — one step per bundle plus the merge/cleanup steps.
    progress = self._get_progress(len(reduction_setting_bundles),
                                  overall_reduction_mode)

    # --------------------------------------------------------------------------------------------------------------
    # Reduction
    # --------------------------------------------------------------------------------------------------------------
    output_bundles = []
    output_parts_bundles = []
    for reduction_setting_bundle in reduction_setting_bundles:
        progress.report("Running a single reduction ...")
        # We want to make use of optimizations here. If a can workspace has already been reduced with the same can
        # settings and is stored in the ADS, then we should use it (provided the user has optimizations enabled).
        if use_optimizations and reduction_setting_bundle.data_type is DataType.Can:
            output_bundle, output_parts_bundle = run_optimized_for_can(
                reduction_alg, reduction_setting_bundle)
        else:
            output_bundle, output_parts_bundle = run_core_reduction(
                reduction_alg, reduction_setting_bundle)
        output_bundles.append(output_bundle)
        output_parts_bundles.append(output_parts_bundle)

    # --------------------------------------------------------------------------------------------------------------
    # Deal with merging
    # --------------------------------------------------------------------------------------------------------------
    # Maps a reduction mode (e.g. merged, LAB, HAB) to its final output workspace.
    reduction_mode_vs_output_workspaces = {}

    # Merge if required with stitching etc.
    if overall_reduction_mode is ReductionMode.Merged:
        progress.report("Merging reductions ...")
        merge_bundle = get_merge_bundle_for_merge_request(
            output_parts_bundles, self)
        self.set_shift_and_scale_output(merge_bundle)
        reduction_mode_vs_output_workspaces.update(
            {ReductionMode.Merged: merge_bundle.merged_workspace})

    # --------------------------------------------------------------------------------------------------------------
    # Deal with non-merged
    # Note that we have non-merged workspaces even in the case of a merged reduction, ie LAB and HAB results
    # --------------------------------------------------------------------------------------------------------------
    progress.report("Final clean up...")
    output_workspaces_non_merged = get_final_output_workspaces(
        output_bundles, self)
    reduction_mode_vs_output_workspaces.update(
        output_workspaces_non_merged)

    # --------------------------------------------------------------------------------------------------------------
    # Set the output workspaces
    # --------------------------------------------------------------------------------------------------------------
    # Set sample logs
    # Todo: Set sample log -> Userfile and unfitted transmission workspace. Should probably set on
    # higher level (SANSBatch)
    # Set the output workspaces
    self.set_output_workspaces(reduction_mode_vs_output_workspaces)

    # --------------------------------------------------------------------------------------------------------------
    # Set the reduced can workspaces on the output if optimizations are
    # enabled. This will allow SANSBatchReduction to add them to the ADS.
    # --------------------------------------------------------------------------------------------------------------
    if use_optimizations:
        self.set_reduced_can_workspace_on_output(output_bundles,
                                                 output_parts_bundles)
def _pyexec(self):
    """Run an event-sliced SANS reduction and register the resulting workspaces as outputs.

    Unlike the non-sliced variant, the reduction bundles here are grouped per
    event slice; each bundle is reduced individually and the completed slices
    are flattened into a single list before merging and output assignment.
    """
    # Get state
    state = self._get_state()

    # Get reduction mode
    overall_reduction_mode = self._get_reduction_mode(state)

    # --------------------------------------------------------------------------------------------------------------
    # Perform the initial reduction. Version 1 does not have an initial reduction.
    # --------------------------------------------------------------------------------------------------------------
    reduction_setting_bundles = self.do_initial_reduction(
        state, overall_reduction_mode)

    # --------------------------------------------------------------------------------------------------------------
    # Setup main reduction
    # --------------------------------------------------------------------------------------------------------------
    # Run core reductions
    use_optimizations = self.getProperty("UseOptimizations").value
    save_can = self.getProperty("SaveCan").value

    # Create the reduction core algorithm
    reduction_alg = create_child_algorithm(self, self._reduction_name(),
                                           **{})

    # Set up progress reporting: total steps = number of bundles across all event slices.
    progress = self._get_progress(
        sum([len(event_list) for event_list in reduction_setting_bundles]),
        overall_reduction_mode)

    # --------------------------------------------------------------------------------------------------------------
    # Reduction - here we slice the workspaces and perform the steps which must be carried out after slicing
    # --------------------------------------------------------------------------------------------------------------
    completed_event_slices: CompletedSlices = []
    for event_slice in reduction_setting_bundles:
        # The single reductions represent CAN / sample reductions
        for bundle in event_slice:
            reduced_slices = self.do_reduction(reduction_alg, bundle,
                                               use_optimizations, progress)
            # Merge the list of lists into a single flat list to keep our lives easier
            completed_event_slices.extend(reduced_slices)

    # --------------------------------------------------------------------------------------------------------------
    # Deal with non-merged
    # Note that we have non-merged workspaces even in the case of a merged reduction, ie LAB and HAB results
    # --------------------------------------------------------------------------------------------------------------
    progress.report("Final clean up...")
    workflow_alg_outputs = get_final_output_workspaces(
        completed_event_slices, self)

    # --------------------------------------------------------------------------------------------------------------
    # Deal with merging
    # --------------------------------------------------------------------------------------------------------------
    # Merge if required with stitching etc. One scale/shift factor pair is collected per merged slice.
    scale_factors = []
    shift_factors = []
    if overall_reduction_mode is ReductionMode.MERGED:
        progress.report("Merging reductions ...")
        merge_bundle = get_merge_bundle_for_merge_request(
            completed_event_slices, self)
        for merged in merge_bundle:
            scale_factors.append(merged.scale)
            shift_factors.append(merged.shift)
            workflow_alg_outputs.merged_output.append(
                merged.merged_workspace)
            # Pack scaled HAB as a diagnostic tool
            scaled_HAB = strip_end_nans(merged.scaled_hab_workspace, self)
            workflow_alg_outputs.scaled_hab_output.append(scaled_HAB)
        self.set_shift_and_scale_output(scale_factors, shift_factors)

    # --------------------------------------------------------------------------------------------------------------
    # Set the output workspaces
    # --------------------------------------------------------------------------------------------------------------
    self.set_output_workspaces(workflow_alg_outputs)

    # --------------------------------------------------------------------------------------------------------------
    # Set the reduced can workspaces on the output if optimizations are
    # enabled. This will allow SANSBatchReduction to add them to the ADS.
    # --------------------------------------------------------------------------------------------------------------
    # NOTE(review): the original source was whitespace-mangled; the nesting of the
    # save_can checks below was reconstructed and should be confirmed against history.
    if use_optimizations:
        if save_can:
            self.set_reduced_can_workspace_on_output(
                completed_event_slices)
        self.set_reduced_can_count_and_norm_on_output(
            completed_event_slices)

    if save_can:
        self.set_can_and_sam_on_output(completed_event_slices)

    self.set_transmission_workspaces_on_output(
        completed_event_slices,
        state.adjustment.calculate_transmission.fit)
def _pyexec(self):
    """Run an event-sliced SANS reduction, tracking both output workspaces and their names.

    Variant that accumulates results in ``defaultdict(list)`` maps keyed by
    reduction mode, so that each mode (LAB, HAB, merged) collects one entry
    per event slice, and passes workspace names alongside the workspaces
    when setting the outputs.
    """
    # Get state
    state = self._get_state()

    # Get reduction mode
    overall_reduction_mode = self._get_reduction_mode(state)

    # --------------------------------------------------------------------------------------------------------------
    # Perform the initial reduction. Version 1 does not have an initial reduction.
    # --------------------------------------------------------------------------------------------------------------
    reduction_setting_bundles = self.do_initial_reduction(
        state, overall_reduction_mode)

    # --------------------------------------------------------------------------------------------------------------
    # Setup main reduction
    # --------------------------------------------------------------------------------------------------------------
    # Run core reductions
    use_optimizations = self.getProperty("UseOptimizations").value
    save_can = self.getProperty("SaveCan").value

    # Create the reduction core algorithm
    reduction_alg = create_child_algorithm(self, self._reduction_name(),
                                           **{})

    # Set up progress reporting: total steps = number of bundles across all event slices.
    progress = self._get_progress(
        sum([len(event_list) for event_list in reduction_setting_bundles]),
        overall_reduction_mode)

    # --------------------------------------------------------------------------------------------------------------
    # Reduction - here we slice the workspaces and perform the steps which must be carried out after slicing
    # --------------------------------------------------------------------------------------------------------------
    output_bundles, output_parts_bundles, \
        output_transmission_bundles = self.do_reduction(reduction_alg,
                                                        reduction_setting_bundles,
                                                        use_optimizations,
                                                        progress)

    # One workspace (and one name) per event slice, keyed by reduction mode.
    reduction_mode_vs_output_workspaces = defaultdict(list)
    reduction_mode_vs_workspace_names = defaultdict(list)

    # --------------------------------------------------------------------------------------------------------------
    # Deal with non-merged
    # Note that we have non-merged workspaces even in the case of a merged reduction, ie LAB and HAB results
    # --------------------------------------------------------------------------------------------------------------
    progress.report("Final clean up...")
    for event_slice_bundle in output_bundles:
        output_workspaces_non_merged = get_final_output_workspaces(
            event_slice_bundle, self)
        for reduction_mode, workspace in output_workspaces_non_merged.items():
            reduction_mode_vs_output_workspaces[reduction_mode].append(
                workspace)
        reduction_mode_vs_workspace_names = self._get_workspace_names(
            reduction_mode_vs_workspace_names, event_slice_bundle)

    # --------------------------------------------------------------------------------------------------------------
    # Deal with merging
    # --------------------------------------------------------------------------------------------------------------
    # Merge if required with stitching etc. One scale/shift factor pair per event slice.
    scale_factors = []
    shift_factors = []
    if overall_reduction_mode is ReductionMode.MERGED:
        progress.report("Merging reductions ...")
        # NOTE(review): loop index i is unused; kept to preserve the original code.
        for i, event_slice_part_bundle in enumerate(output_parts_bundles):
            merge_bundle = get_merge_bundle_for_merge_request(
                event_slice_part_bundle, self)
            scale_factors.append(merge_bundle.scale)
            shift_factors.append(merge_bundle.shift)
            reduction_mode_vs_output_workspaces[
                ReductionMode.MERGED].append(merge_bundle.merged_workspace)
            merged_name = self._get_merged_workspace_name(
                event_slice_part_bundle)
            reduction_mode_vs_workspace_names[ReductionMode.MERGED].append(
                merged_name)

            # Pack the scaled HAB as a diagnostic output alongside the merge result.
            scaled_HAB = strip_end_nans(merge_bundle.scaled_hab_workspace,
                                        self)
            reduction_mode_vs_output_workspaces[ReductionMode.HAB].append(
                scaled_HAB)
            # Get HAB workspace name.
            # NOTE(review): this rebinds the outer `state`, so the transmission call at
            # the end of this method sees the last slice's state — confirm this is intended.
            state = event_slice_part_bundle[0].state
            hab_name = self._get_output_workspace_name(
                state, reduction_mode=ReductionMode.HAB)
            reduction_mode_vs_workspace_names[ReductionMode.HAB].append(
                hab_name)

        self.set_shift_and_scale_output(scale_factors, shift_factors)

    # --------------------------------------------------------------------------------------------------------------
    # Set the output workspaces
    # --------------------------------------------------------------------------------------------------------------
    self.set_output_workspaces(reduction_mode_vs_output_workspaces,
                               reduction_mode_vs_workspace_names)

    # --------------------------------------------------------------------------------------------------------------
    # Set the reduced can workspaces on the output if optimizations are
    # enabled. This will allow SANSBatchReduction to add them to the ADS.
    # --------------------------------------------------------------------------------------------------------------
    # NOTE(review): the original source was whitespace-mangled; the nesting of the
    # save_can checks below was reconstructed and should be confirmed against history.
    if use_optimizations:
        if not save_can:
            self.set_reduced_can_workspace_on_output(output_bundles)
        self.set_reduced_can_count_and_norm_on_output(output_parts_bundles)

    if save_can:
        self.set_can_and_sam_on_output(output_bundles)

    self.set_transmission_workspaces_on_output(
        output_transmission_bundles,
        state.adjustment.calculate_transmission.fit)
def PyExec(self):
    """Run a single SANS reduction, including transmission outputs, and register results.

    Extends the basic single-reduction workflow with transmission bundles
    (collected per reduction bundle and set on the output at the end) and a
    ``SaveCan`` option that additionally exposes the reduced can and sample
    workspaces.
    """
    # Get state
    state = self._get_state()

    # Get reduction mode
    overall_reduction_mode = self._get_reduction_mode(state)

    # Decide which core reduction information to run, i.e. HAB, LAB, ALL, MERGED. In the case of ALL and MERGED,
    # the required simple reduction modes need to be run. Normally this is HAB and LAB, future implementations
    # might have more detectors though (or different types)
    reduction_setting_bundles = self._get_reduction_setting_bundles(state, overall_reduction_mode)

    # Run core reductions
    use_optimizations = self.getProperty("UseOptimizations").value
    save_can = self.getProperty("SaveCan").value

    # Create the reduction core algorithm
    reduction_name = "SANSReductionCore"
    reduction_options = {}
    reduction_alg = create_child_algorithm(self, reduction_name, **reduction_options)

    # Set up progress reporting — one step per bundle plus the merge/cleanup steps.
    progress = self._get_progress(len(reduction_setting_bundles), overall_reduction_mode)

    # --------------------------------------------------------------------------------------------------------------
    # Reduction
    # --------------------------------------------------------------------------------------------------------------
    output_bundles = []
    output_parts_bundles = []
    output_transmission_bundles = []
    for reduction_setting_bundle in reduction_setting_bundles:
        progress.report("Running a single reduction ...")
        # We want to make use of optimizations here. If a can workspace has already been reduced with the same can
        # settings and is stored in the ADS, then we should use it (provided the user has optimizations enabled).
        if use_optimizations and reduction_setting_bundle.data_type is DataType.Can:
            output_bundle, output_parts_bundle, output_transmission_bundle = run_optimized_for_can(reduction_alg,
                                                                                                   reduction_setting_bundle)
        else:
            output_bundle, output_parts_bundle, output_transmission_bundle = run_core_reduction(reduction_alg,
                                                                                                reduction_setting_bundle)
        output_bundles.append(output_bundle)
        output_parts_bundles.append(output_parts_bundle)
        output_transmission_bundles.append(output_transmission_bundle)

    # Maps a reduction mode (e.g. merged, LAB, HAB) to its final output workspace.
    reduction_mode_vs_output_workspaces = {}

    # --------------------------------------------------------------------------------------------------------------
    # Deal with non-merged
    # Note that we have non-merged workspaces even in the case of a merged reduction, ie LAB and HAB results
    # --------------------------------------------------------------------------------------------------------------
    progress.report("Final clean up...")
    output_workspaces_non_merged = get_final_output_workspaces(output_bundles, self)
    reduction_mode_vs_output_workspaces.update(output_workspaces_non_merged)

    # --------------------------------------------------------------------------------------------------------------
    # Deal with merging
    # --------------------------------------------------------------------------------------------------------------
    # Merge if required with stitching etc.
    if overall_reduction_mode is ReductionMode.Merged:
        progress.report("Merging reductions ...")
        merge_bundle = get_merge_bundle_for_merge_request(output_parts_bundles, self)
        self.set_shift_and_scale_output(merge_bundle)
        reduction_mode_vs_output_workspaces.update({ReductionMode.Merged: merge_bundle.merged_workspace})
        # Pack the scaled HAB as a diagnostic output alongside the merge result.
        # NOTE(review): this mixes ReductionMode.Merged with ISISReductionMode.HAB in the
        # same mapping — presumably the two enums are related/aliases; verify against the imports.
        scaled_HAB = strip_end_nans(merge_bundle.scaled_hab_workspace, self)
        reduction_mode_vs_output_workspaces.update({ISISReductionMode.HAB: scaled_HAB})

    # --------------------------------------------------------------------------------------------------------------
    # Set the output workspaces
    # --------------------------------------------------------------------------------------------------------------
    # Set sample logs
    # Todo: Set sample log -> Userfile and unfitted transmission workspace. Should probably set on
    # higher level (SANSBatch)
    # Set the output workspaces
    self.set_output_workspaces(reduction_mode_vs_output_workspaces)

    # --------------------------------------------------------------------------------------------------------------
    # Set the reduced can workspaces on the output if optimizations are
    # enabled. This will allow SANSBatchReduction to add them to the ADS.
    # --------------------------------------------------------------------------------------------------------------
    if use_optimizations:
        self.set_reduced_can_workspace_on_output(output_bundles, output_parts_bundles)

    if save_can:
        self.set_can_and_sam_on_output(output_bundles)

    self.set_transmission_workspaces_on_output(output_transmission_bundles,
                                               state.adjustment.calculate_transmission.fit)