def test_dtype_function_calls(self):
    """Tests the dtype() function call for the data types stored in the array."""
    # Set up
    direc = Direction.Output
    validator = NullValidator()

    # Create float array
    float_input_values = [1.1, 2.5, 5.6, 4.6, 9.0, 6.0]
    float_arr = FloatArrayProperty("floats", float_input_values, validator, direc)

    # Create int array
    int_input_values = [1, 2, 5, 4, 9, 6]
    int_arr = IntArrayProperty("integers", int_input_values, validator, direc)

    # Create string array
    str_input_values = ["a", "b", "c", "d", "e"]
    str_arr = StringArrayProperty("letters", str_input_values, validator, direc)

    # Test. assertEqual replaces the deprecated assertEquals alias,
    # which was removed in Python 3.12.
    self.assertEqual(float_arr.dtype(), "f")
    self.assertEqual(int_arr.dtype(), "i")
    self.assertEqual(str_arr.dtype(), "S1")
def PyInit(self):
    # Declare the algorithm's properties: an optional workspace to mask
    # (InOut, so masking is applied in place), the masking selection, and
    # an output list of the detectors that were masked.
    self.declareProperty(
        mantid.api.WorkspaceProperty(
            "Workspace", "", direction=Direction.InOut,
            optional=mantid.api.PropertyMode.Optional),
        "Input workspace (optional)")
    # An empty string entry allows "no instrument selected".
    allowedInstrumentList = StringListValidator([''] + self.INSTRUMENT_LIST)
    self.declareProperty("Instrument", "",
                         validator=allowedInstrumentList,
                         doc="One of the following instruments: " + ', '.join(self.INSTRUMENT_LIST))
    self.declareProperty(StringArrayProperty(name='Components', values=[]),
                         doc='Component names to mask')
    self.declareProperty(
        IntArrayProperty(name="Bank", values=[]),
        doc="Bank(s) to be masked. If empty, will apply to all banks")
    # NOTE(review): Tube and Pixel are free-form strings, unlike Bank which
    # is an int array — presumably parsed elsewhere; confirm expected syntax.
    self.declareProperty(
        "Tube", "",
        doc="Tube(s) to be masked. If empty, will apply to all tubes")
    self.declareProperty(
        "Pixel", "",
        doc="Pixel(s) to be masked. If empty, will apply to all pixels")
    self.declareProperty(IntArrayProperty(name="MaskedDetectors", direction=Direction.Output),
                         doc="List of masked detectors")
def PyInit(self):
    # Declare data-source properties. IPTS/Exp/ScanNumbers are only enabled
    # while no explicit Filename has been given.
    self.declareProperty(MultipleFileProperty(name="Filename",
                                              action=FileAction.OptionalLoad,
                                              extensions=[".dat"]),
                         "Data files to load")
    condition = EnabledWhenProperty("Filename", PropertyCriterion.IsDefault)
    self.declareProperty('IPTS', Property.EMPTY_INT, "IPTS number to load from")
    self.setPropertySettings("IPTS", condition)
    self.declareProperty('Exp', Property.EMPTY_INT, "Experiment number to load from")
    self.setPropertySettings("Exp", condition)
    self.declareProperty(IntArrayProperty("ScanNumbers", []), 'Scan numbers to load')
    self.setPropertySettings("ScanNumbers", condition)
    self.declareProperty(FileProperty(name="Vanadium", defaultValue="",
                                      action=FileAction.OptionalLoad,
                                      extensions=[".dat", ".txt"]),
                         doc="Vanadium file, can be either the vanadium scan file or the reduced vcorr file. "
                             "If not provided the vcorr file adjacent to the data file will be used")
    self.declareProperty('Normalise', True,
                         "If False vanadium normalisation will not be performed")
    self.declareProperty(IntArrayProperty("ExcludeDetectors", []),
                         doc="Detectors to exclude. If not provided the HB2A_exp???__exclude_detectors.txt adjacent "
                             "to the data file will be used if it exist")
    self.declareProperty('DefX', '',
                         "By default the def_x (x-axis) from the file will be used, it can be overridden by setting it here")
    self.declareProperty('IndividualDetectors', False,
                         "If True the workspace will include each anode as a separate spectrum, useful for debugging issues")
    # Binning options are only enabled while IndividualDetectors is off.
    condition = EnabledWhenProperty("IndividualDetectors", PropertyCriterion.IsDefault)
    self.declareProperty('BinData', True,
                         "Data will be binned using BinWidth. If False then all data will be unbinned")
    self.setPropertySettings("BinData", condition)
    positiveFloat = FloatBoundedValidator(lower=0., exclusive=True)
    self.declareProperty('BinWidth', 0.05, positiveFloat,
                         "Bin size of the output workspace")
    self.setPropertySettings("BinWidth", condition)
    self.declareProperty('Scale', 1.0, positiveFloat,
                         "The output will be scaled by this value")
    self.declareProperty(WorkspaceProperty("OutputWorkspace", "",
                                           optional=PropertyMode.Mandatory,
                                           direction=Direction.Output),
                         "Output Workspace")
def PyInit(self):
    """Declare properties."""
    self.declareProperty(mantid.api.WorkspaceProperty(
        "Workspace", "",
        direction=mantid.kernel.Direction.Input,
        validator=mantid.api.InstrumentValidator()),
        "Input workspace")
    # NOTE(review): Direction.Output is passed as the second positional
    # argument of the array properties; this relies on the mantid
    # ArrayProperty (name, direction) constructor overload — confirm
    # against the mantid Python API.
    self.declareProperty(IntArrayProperty("UpstreamSpectra", Direction.Output))
    self.declareProperty(FloatArrayProperty("UpstreamDetectorDistances", Direction.Output))
    self.declareProperty(IntArrayProperty("DownstreamSpectra", Direction.Output))
    self.declareProperty(FloatArrayProperty("DownstreamDetectorDistances", Direction.Output))
def test_construct_numpy_array_with_given_dtype_int(self):
    """Check that the dtype() returned by an int array property can be
    used directly to construct a numpy array."""
    # Set up
    direc = Direction.Output
    validator = NullValidator()

    # Create int array
    int_input_values = [1, 2, 5, 4, 9, 6]
    int_arr = IntArrayProperty("integers", int_input_values, validator, direc)

    # Use the returned dtype() to check it works with numpy arrays.
    # assertEqual replaces the deprecated assertEquals alias, which was
    # removed in Python 3.12.
    x = np.arange(1, 10, dtype=int_arr.dtype())
    self.assertIsInstance(x, np.ndarray)
    self.assertEqual(x.dtype, int_arr.dtype())
def PyInit(self):
    """Declare properties."""
    self.declareProperty(
        WorkspaceProperty("Workspace", "", Direction.Input),
        "The workspace to export masks from.")
    self.declareProperty(
        FileProperty(name="Filename", defaultValue="",
                     action=FileAction.OptionalSave,
                     extensions=[".msk"],
                     direction=Direction.Input),
        doc="The name or full path to the file to save mask to."
            " If empty, the name of the input workspace and default save directory are used.")
    # Fixed missing space between the concatenated doc fragments
    # ("...in a fileand only..." -> "...in a file and only...").
    self.declareProperty(
        "ExportMaskOnly", False,
        "If true, algorithm will not save mask in a file "
        "and only returns the list containing numbers of masked spectra.",
        Direction.Input)
    self.declareProperty(IntArrayProperty(name="SpectraMasks", direction=Direction.Output),
                         doc="List of the masked spectra numbers.")
def PyInit(self):
    # Declare the MDEvent -> MDHisto (HKL) conversion properties.
    self.declareProperty(IMDEventWorkspaceProperty("InputWorkspace", defaultValue="",
                                                   optional=PropertyMode.Mandatory,
                                                   direction=Direction.Input),
                         doc="Input MDEvent workspace to convert to a MDHisto in HKL")
    self.declareProperty(IPeaksWorkspaceProperty("PeaksWorkspace", defaultValue="",
                                                 optional=PropertyMode.Optional,
                                                 direction=Direction.Input),
                         doc="Optional peaks workspace to retrieve the UB matrix from, instead of InputWorkspace.")
    self.declareProperty(FloatArrayProperty("Uproj", [1, 0, 0],
                                            validator=FloatArrayLengthValidator(3),
                                            direction=Direction.Input),
                         doc="Defines the first projection vector of the target Q coordinate system in HKL mode")
    self.declareProperty(FloatArrayProperty("Vproj", [0, 1, 0],
                                            validator=FloatArrayLengthValidator(3),
                                            direction=Direction.Input),
                         doc="Defines the second projection vector of the target Q coordinate system in HKL mode")
    self.declareProperty(FloatArrayProperty("Wproj", [0, 0, 1],
                                            validator=FloatArrayLengthValidator(3),
                                            direction=Direction.Input),
                         doc="Defines the third projection vector of the target Q coordinate system in HKL mode")
    # Fixed missing spaces in the concatenated doc fragments below
    # ("...as acomma-separated..." and "...with theformat..." read as run-on
    # words in the GUI).
    self.declareProperty(FloatArrayProperty("Extents", [-6.02, 6.02, -6.02, 6.02, -6.02, 6.02],
                                            direction=Direction.Input),
                         "Binning parameters for each dimension. Enter it as a "
                         "comma-separated list of values with the "
                         "format: 'minimum,maximum,'.")
    self.declareProperty(IntArrayProperty("Bins", [301, 301, 301],
                                          direction=Direction.Input),
                         "Number of bins to use for each dimension, Enter it as a "
                         "comma-separated list of integers.")
    self.declareProperty(IMDHistoWorkspaceProperty("OutputWorkspace", "",
                                                   optional=PropertyMode.Mandatory,
                                                   direction=Direction.Output),
                         doc="Output MDWorkspace in Q-space, name is prefix if multiple input files were provided.")
def PyInit(self):
    # Declare one int and one float array input, each restricted to
    # ordered pairs by its validator.
    # NOTE(review): the validators are passed as the second positional
    # argument; this relies on the mantid ArrayProperty (name, validator)
    # constructor overload — confirm against the mantid Python API.
    int_validator = IntArrayOrderedPairsValidator()
    self.declareProperty(
        IntArrayProperty("IntInput", int_validator))
    float_validator = FloatArrayOrderedPairsValidator()
    self.declareProperty(
        FloatArrayProperty("FloatInput", float_validator))
def PyInit(self):
    # Declare file/IPTS inputs, masking and grouping options, and the
    # output workspace.
    self.declareProperty(MultipleFileProperty(name="Filename",
                                              action=FileAction.OptionalLoad,
                                              extensions=[".nxs.h5"]),
                         "Files to Load")
    self.declareProperty('IPTS', Property.EMPTY_INT, "IPTS number to load from")
    self.declareProperty(IntArrayProperty("RunNumbers", []), 'Run numbers to load')
    self.declareProperty("ApplyMask", True,
                         "If True standard masking will be applied to the workspace")
    self.declareProperty("Grouping", 'None',
                         StringListValidator(['None', '2x2', '4x4']),
                         "Group pixels")
    self.declareProperty(WorkspaceProperty(name="OutputWorkspace",
                                           defaultValue="",
                                           direction=Direction.Output))
def _getRuns(self, rlist, doIndiv=True):
    """
    Create sets of run numbers for analysis. A semicolon indicates a
    separate group of runs to be processed together.

    Parameters
    ----------
    rlist: string
        Run numbers to be reduced.
    doIndiv: bool
        Marks if files are to be reduced together

    Returns
    -------
    list
        If `doIndiv` is `False`, each item is the expanded set of run
        numbers (an `IntArrayProperty.value` array) for one
        semicolon-separated group, to be reduced together. If `doIndiv`
        is `True`, each item is a single-element list holding one run
        number.
    """
    run_list = []
    # ";" separates the runs into substrings. Each substring represents a set of runs
    rlvals = rlist.split(';')
    for rlval in rlvals:
        # IntArrayProperty expands run ranges (e.g. "1-5") into numbers.
        iap = IntArrayProperty('', rlval)  # split the substring
        if doIndiv:
            run_list.extend([[x] for x in iap.value])
        else:
            run_list.append(iap.value)
    return run_list
def _get_runs(self, rlist, doIndiv=True):
    r"""
    Create sets of run numbers for analysis. A semicolon indicates a
    separate group of runs to be processed together.

    Parameters
    ----------
    rlist: str
        All the run numbers to be reduced.
    doIndiv: bool
        Return each run on its own list

    Returns
    -------
    list
        If doIndiv is False, each item is the expanded set of run
        numbers (an `IntArrayProperty.value` array) for one
        semicolon-separated group, to be reduced together. If doIndiv
        is True, each item is a single-element list holding one run
        number.
    """
    run_list = []
    # ';' separates the runs into substrings. Each substring
    # represents a set of runs
    rlvals = rlist.split(';')
    for rlval in rlvals:
        # IntArrayProperty expands run ranges (e.g. "1-5") into numbers.
        iap = IntArrayProperty('_get_runs_iap', rlval)  # substring split
        if doIndiv:
            run_list.extend([[x] for x in iap.value])
        else:
            run_list.append(iap.value)
    return run_list
def _get_runs(self, rlist, doIndiv=True):
    r"""
    Split a run-number specification into groups of runs.

    A semicolon separates groups of runs that are to be processed
    together.

    Parameters
    ----------
    rlist: str
        All the run numbers to be reduced.
    doIndiv: bool
        Return each run on its own list

    Returns
    -------
    list
        Items of this list are lists of strings. If `doIndiv` is True,
        each item holds a single run number. If `doIndiv` is False,
        each item holds the run numbers of one group, which are to be
        reduced together.
    """
    groups = []
    # every substring between ';' separators describes one set of runs
    for chunk in rlist.split(';'):
        # IntArrayProperty expands ranges such as '1-5' into numbers
        numbers = [str(run) for run in IntArrayProperty('_get_runs_iap', chunk).value]
        if doIndiv:
            groups.extend([run] for run in numbers)
        else:
            groups.append(numbers)
    return groups
def PyInit(self):
    # Declare one float, int, and string array input, each with a
    # mandatory-value validator.
    # NOTE(review): the validators are passed as the second positional
    # argument; this relies on the mantid ArrayProperty (name, validator)
    # constructor overload — confirm against the mantid Python API.
    self.declareProperty(
        FloatArrayProperty("FloatInput", FloatArrayMandatoryValidator()))
    self.declareProperty(
        IntArrayProperty("IntInput", IntArrayMandatoryValidator()))
    self.declareProperty(
        StringArrayProperty("StringInput", StringArrayMandatoryValidator()))
def PyInit(self):
    # Input workspace/data info (filename or IPTS+RunNumber)
    # Priority: Filename > IPTS + RunNumber
    self.declareProperty(
        MultipleFileProperty(name="Filename",
                             action=FileAction.OptionalLoad,
                             extensions=[".nxs.h5"]),
        "Files to load")
    self.declareProperty('IPTS', Property.EMPTY_INT, "IPTS number to load from")
    self.declareProperty(IntArrayProperty("RunNumbers", []), 'Run numbers to load')
    # Normalization info (optional, skip normalization if not specified)
    # Priority: IPTS + RunNumber > Filename > NormalizationFile
    # NOTE:
    # The current convention for loading Vanadium data is by IPTS+RunNumber,
    # so this is the default.
    # -- default
    self.declareProperty('VanadiumIPTS', Property.EMPTY_INT,
                         "IPTS number to load Vanadium normalization")
    self.declareProperty('VanadiumRunNumber', Property.EMPTY_INT,
                         "Run number to load Vanadium normalization")
    # -- alternative
    self.declareProperty(FileProperty(name="VanadiumFile", defaultValue="",
                                      extensions=[".nxs"],
                                      direction=Direction.Input,
                                      action=FileAction.OptionalLoad),
                         doc="File with Vanadium normalization scan data")
    # alternative
    self.declareProperty(
        IMDHistoWorkspaceProperty("VanadiumWorkspace",
                                  defaultValue="",
                                  direction=Direction.Input,
                                  optional=PropertyMode.Optional),
        doc="MDHisto workspace containing vanadium normalization data")
    # normalization method
    self.declareProperty(
        "NormalizedBy", 'None',
        StringListValidator(['None', 'Counts', 'Monitor', 'Time']),
        "Normalize to Counts, Monitor, Time.")
    # group normalization properties
    self.setPropertyGroup('VanadiumIPTS', 'Normalization')
    self.setPropertyGroup('VanadiumRunNumber', 'Normalization')
    self.setPropertyGroup('VanadiumFile', 'Normalization')
    self.setPropertyGroup('VanadiumWorkspace', 'Normalization')
    self.setPropertyGroup('NormalizedBy', 'Normalization')
    # Grouping info
    self.declareProperty(
        "Grouping", 'None',
        StringListValidator(['None', '2x2', '4x4']),
        "Group pixels (shared by input and normalization)")
    # Output workspace/data info
    self.declareProperty(
        WorkspaceProperty("OutputWorkspace", "",
                          optional=PropertyMode.Mandatory,
                          direction=Direction.Output),
        "Output Workspace")
def PyInit(self):
    # Declare the input/output table workspaces and the column selection.
    self.declareProperty(ITableWorkspaceProperty('InputWorkspace', '', Direction.Input),
                         doc='Input table workspace.')
    # Column indices must be non-negative.
    validator = IntArrayBoundedValidator(lower=0)
    self.declareProperty(
        IntArrayProperty('ColumnIndices', values=[],
                         direction=Direction.Input,
                         validator=validator),
        'Comma separated list of column indices for which statistics will be separated')
    self.declareProperty(ITableWorkspaceProperty('OutputWorkspace', '', Direction.Output),
                         doc='Output workspace containing column statistics.')
def PyInit(self):
    """Initialize the input and output properties of the algorithm."""
    # Validator for a three-element array of non-negative integers.
    threeNonnegativeInts = CompositeValidator()
    threeNonnegativeInts.add(IntArrayLengthValidator(3))
    nonnegativeInts = IntArrayBoundedValidator()
    nonnegativeInts.setLower(0)
    threeNonnegativeInts.add(nonnegativeInts)
    self.declareProperty(
        MatrixWorkspaceProperty(
            Prop.INPUT_WS,
            defaultValue='',
            direction=Direction.Input,
            validator=WorkspaceUnitValidator('Wavelength')),
        doc='An input workspace (units wavelength) to be integrated.')
    self.declareProperty(
        MatrixWorkspaceProperty(Prop.OUTPUT_WS,
                                defaultValue='',
                                direction=Direction.Output),
        doc='The integrated foreground divided by the summed direct beam.')
    self.declareProperty(Prop.SUBALG_LOGGING,
                         defaultValue=SubalgLogging.OFF,
                         validator=StringListValidator(
                             [SubalgLogging.OFF, SubalgLogging.ON]),
                         doc='Enable or disable child algorithm logging.')
    self.declareProperty(
        Prop.CLEANUP,
        defaultValue=common.WSCleanup.ON,
        validator=StringListValidator(
            [common.WSCleanup.ON, common.WSCleanup.OFF]),
        doc='Enable or disable intermediate workspace cleanup.')
    self.declareProperty(Prop.SUM_TYPE,
                         defaultValue=SumType.IN_LAMBDA,
                         validator=StringListValidator(
                             [SumType.IN_LAMBDA, SumType.IN_Q]),
                         doc='Type of summation to perform.')
    self.declareProperty(
        MatrixWorkspaceProperty(
            Prop.DIRECT_FOREGROUND_WS,
            defaultValue='',
            direction=Direction.Input,
            optional=PropertyMode.Optional,
            validator=WorkspaceUnitValidator('Wavelength')),
        doc='Summed direct beam workspace if output in reflectivity is required.')
    self.declareProperty(
        IntArrayProperty(Prop.FOREGROUND_INDICES,
                         values=[
                             Property.EMPTY_INT, Property.EMPTY_INT,
                             Property.EMPTY_INT
                         ],
                         validator=threeNonnegativeInts),
        doc='A three element array of foreground start, centre and end workspace indices.')
def PyInit(self):
    # Declare file/IPTS inputs, pixel grouping, and the output workspace.
    self.declareProperty(MultipleFileProperty(name="Filename",
                                              action=FileAction.OptionalLoad,
                                              extensions=[".nxs.h5"]),
                         "Files to load")
    self.declareProperty('IPTS', Property.EMPTY_INT, "IPTS number to load from")
    self.declareProperty(IntArrayProperty("RunNumbers", []), 'Run numbers to load')
    self.declareProperty("Grouping", 'None',
                         StringListValidator(['None', '2x2', '4x4']),
                         "Group pixels")
    self.declareProperty(WorkspaceProperty("OutputWorkspace", "",
                                           optional=PropertyMode.Mandatory,
                                           direction=Direction.Output),
                         "Output Workspace")
def PyInit(self):
    # Instrument / analyser setup.
    self.declareProperty(name='Instrument', defaultValue='IRIS',
                         validator=StringListValidator(['IRIS', 'OSIRIS']),
                         doc='The name of the instrument.')
    self.declareProperty(name='Analyser', defaultValue='',
                         validator=StringListValidator(['graphite', 'mica', 'fmica']),
                         doc='The analyser bank used during run.')
    self.declareProperty(name='Reflection', defaultValue='',
                         validator=StringListValidator(['002', '004', '006']),
                         doc='Reflection number for instrument setup during run.')
    # Run-number range to process.
    self.declareProperty(name="FirstRun", defaultValue=-1,
                         validator=IntBoundedValidator(lower=0),
                         doc="First Sample run-number.")
    self.declareProperty(name='LastRun', defaultValue=-1,
                         validator=IntBoundedValidator(lower=0),
                         doc="Last Sample run-number.")
    self.declareProperty(name='NumberSamples', defaultValue=-1,
                         validator=IntBoundedValidator(lower=0),
                         doc="Increment for run-number.")
    self.declareProperty(IntArrayProperty(name='SpectraRange', values=[0, 1],
                                          validator=IntArrayLengthValidator(2)),
                         doc='Comma separated range of spectra numbers to use.')
    # Energy ranges, each a two-element float array.
    self.declareProperty(FloatArrayProperty(name='ElasticRange',
                                            validator=FloatArrayLengthValidator(2)),
                         doc='Energy range for the elastic component.')
    self.declareProperty(FloatArrayProperty(name='InelasticRange',
                                            validator=FloatArrayLengthValidator(2)),
                         doc='Energy range for the inelastic component.')
    self.declareProperty(FloatArrayProperty(name='TotalRange',
                                            validator=FloatArrayLengthValidator(2)),
                         doc='Energy range for the total energy component.')
    # Sample environment log selection.
    self.declareProperty(name='SampleEnvironmentLogName', defaultValue='Position',
                         doc='Name of the sample environment log entry')
    sample_environment_log_values = ['last_value', 'average']
    self.declareProperty('SampleEnvironmentLogValue', 'last_value',
                         StringListValidator(sample_environment_log_values),
                         doc='Value selection of the sample environment log entry')
    # Optional fits and output handling.
    self.declareProperty(name='MSDFit', defaultValue=False,
                         doc='Perform an MSDFit. Do not use with GroupingMethod as "All"')
    self.declareProperty(name='WidthFit', defaultValue=False,
                         doc='Perform a 2 peak width Fit. Do not use with GroupingMethod as "All"')
    self.declareProperty(name='Plot', defaultValue=False,
                         doc='True to plot the output data.')
    self.declareProperty(name='Save', defaultValue=False,
                         doc='True to save the output data.')
def _parseBTPlist(self, value, min_value):
    """Parse a bank/tube/pixel specification into zero-based indices.

    An empty specification means "use everything" and yields an empty
    list. Otherwise the string is expanded by IntArrayProperty and the
    result is shifted down by ``min_value``.

    Raises RuntimeError when the specification is invalid or expands
    to nothing.
    """
    if len(value) == 0:
        return []  # empty list means use everything
    # Let IntArrayProperty do the parsing and report any problem.
    prop = IntArrayProperty(name='temp', values=value)
    error = prop.isValid
    if error:
        raise RuntimeError(error)
    parsed = prop.value
    if len(parsed) == 0:
        raise RuntimeError('Could not generate values from "{}"'.format(value))
    # shift to zero-based indices
    return parsed - min_value
def PyInit(self):
    # Declare the input data, the symmetrisation range, and the outputs.
    self.declareProperty(MatrixWorkspaceProperty('InputWorkspace', '', Direction.Input),
                         doc='Sample to run with')
    self.declareProperty(IntArrayProperty(name='SpectraRange'),
                         doc='Range of spectra to symmetrise (defaults to entire range if not set)')
    self.declareProperty('XMin', 0.0, doc='X value marking lower limit of curve to copy')
    self.declareProperty('XMax', 0.0, doc='X value marking upper limit of curve to copy')
    self.declareProperty(MatrixWorkspaceProperty('OutputWorkspace', '', Direction.Output),
                         doc='Name to call the output workspace.')
    self.declareProperty(ITableWorkspaceProperty('OutputPropertiesTable', '',
                                                 Direction.Output, PropertyMode.Optional),
                         doc='Name to call the properties output table workspace.')
def _getRuns(self, rlist, doIndiv=True):
    """
    Create sets of run numbers for analysis. A semicolon indicates a
    separate group of runs to be processed together.

    :param rlist: string containing all the run numbers to be reduced.
    :return: if doIndiv is False, a list where each item is the expanded
        set of run numbers (an IntArrayProperty.value array) for one
        semicolon-separated group, to be reduced together.
        if doIndiv is True, a list where each item is a single-element
        list holding one run number.
    """
    run_list = []
    # ";" separates the runs into substrings. Each substring represents a set of runs
    rlvals = rlist.split(';')
    for rlval in rlvals:
        # IntArrayProperty expands run ranges (e.g. "1-5") into numbers.
        iap = IntArrayProperty('', rlval)  # split the substring
        if doIndiv:
            run_list.extend([[x] for x in iap.value])
        else:
            run_list.append(iap.value)
    return run_list
def PyInit(self):
    # Declare the sample input, symmetrisation range, UI switches, and
    # the outputs.
    self.declareProperty(MatrixWorkspaceProperty('Sample', '', Direction.Input),
                         doc='Sample to run with')
    self.declareProperty(IntArrayProperty(name='SpectraRange'),
                         doc='Range of spectra to symmetrise (defaults to entire range if not set)')
    self.declareProperty('XMin', 0.0, doc='X value marking lower limit of curve to copy')
    self.declareProperty('XMax', 0.0, doc='X value marking upper limit of curve to copy')
    self.declareProperty('Verbose', defaultValue=False,
                         doc='Switch verbose output Off/On')
    self.declareProperty('Plot', defaultValue=False,
                         doc='Switch plotting Off/On')
    self.declareProperty('Save', defaultValue=False,
                         doc='Switch saving result to nxs file Off/On')
    self.declareProperty(MatrixWorkspaceProperty('OutputWorkspace', '', Direction.Output),
                         doc='Name to call the output workspace.')
    self.declareProperty(ITableWorkspaceProperty('OutputPropertiesTable', '',
                                                 Direction.Output, PropertyMode.Optional),
                         doc='Name to call the properties output table workspace.')
def PyInit(self):
    # Declare plotting inputs and outputs.
    self.declareProperty(
        mantid.api.WorkspaceProperty('InputWorkspace', '',
                                     mantid.kernel.Direction.Input),
        'Workspace to plot')
    # Typo fixed in the doc below ("to savefile." -> "to save.").
    self.declareProperty(mantid.api.FileProperty(
        'OutputFilename', '',
        action=mantid.api.FileAction.OptionalSave,
        extensions=['.png']),
        doc='Name of the image file to save.')
    # plotly output is only offered when the package is installed.
    have_plotly = importlib.util.find_spec("plotly") is not None
    if have_plotly:
        outputTypes = ['image', 'plotly', 'plotly-full']
    else:
        outputTypes = ['image']
    self.declareProperty('OutputType', 'image',
                         StringListValidator(outputTypes),
                         'Method for rendering plot')
    self.declareProperty(
        'XLabel', '',
        'Label on the X axis. If empty, it will be taken from workspace')
    self.declareProperty(
        'YLabel', '',
        'Label on the Y axis. If empty, it will be taken from workspace')
    self.declareProperty(
        IntArrayProperty('SpectraList', [], direction=Direction.Input),
        'Which spectra to plot')
    self.declareProperty(
        StringArrayProperty('SpectraNames', [], direction=Direction.Input),
        'Override with custom names for spectra')
    self.declareProperty('Result', '', Direction.Output)
    # Doc fixed: stray space before the comma in the concatenated string
    # ("popped out , which").
    self.declareProperty(
        'PopCanvas', False,
        'If true, a Matplotlib canvas will be popped out, '
        'which contains the saved plot.')
def PyInit(self):
    # Declare the run-number array input.
    # NOTE(review): Direction.Input as the second positional argument
    # relies on the mantid ArrayProperty (name, direction) constructor
    # overload — confirm against the mantid Python API.
    self.declareProperty(
        IntArrayProperty("Input", Direction.Input), "Run numbers")
def PyInit(self):
    # Declare data selection, masking, calibration, normalization,
    # grouping, and output properties for the reduction.
    validator = IntArrayBoundedValidator()
    validator.setLower(0)
    self.declareProperty(
        IntArrayProperty("RunNumbers", values=[0],
                         direction=Direction.Input,
                         validator=validator),
        "Run numbers to process, comma separated")
    self.declareProperty(
        "LiveData", False,
        "Read live data - requires a saved run in the current IPTS " +
        "with the same Instrument configuration as the live run")
    mask = [
        "None", "Horizontal", "Vertical", "Masking Workspace",
        "Custom - xml masking file"
    ]
    self.declareProperty("Masking", "None", StringListValidator(mask),
                         "Mask to be applied to the data")
    self.declareProperty(
        WorkspaceProperty("MaskingWorkspace", "", Direction.Input,
                          PropertyMode.Optional),
        "The workspace containing the mask.")
    self.declareProperty(FileProperty(name="MaskingFilename",
                                      defaultValue="",
                                      direction=Direction.Input,
                                      action=FileAction.OptionalLoad),
                         doc="The file containing the xml mask.")
    self.declareProperty(
        name="Calibration", defaultValue="Convert Units",
        validator=StringListValidator(
            ['Convert Units', 'Calibration File', 'DetCal File']),
        direction=Direction.Input,
        doc="The type of conversion to d_spacing to be used.")
    self.declareProperty(
        FileProperty(name="CalibrationFilename", defaultValue="",
                     direction=Direction.Input,
                     action=FileAction.OptionalLoad),
        doc="The calibration file to convert to d_spacing.")
    self.declareProperty(
        MultipleFileProperty(name='DetCalFilename',
                             extensions=['.detcal'],
                             action=FileAction.OptionalLoad),
        'ISAW DetCal file')
    self.declareProperty(
        FloatArrayProperty("Binning", [0.5, -0.004, 7.0]),
        "Min, Step, and Max of d-space bins. Logarithmic binning is used if Step is negative.")
    nor_corr = [
        "None", "From Workspace", "From Processed Nexus",
        "Extracted from Data"
    ]
    # Doc fixed: missing space after "independent." and
    # "implemantation" -> "implementation".
    self.declareProperty(
        "Normalization", "None", StringListValidator(nor_corr),
        "If needed what type of input to use as normalization, Extracted from " +
        "Data uses a background determination that is peak independent. This " +
        "implementation can be tested in algorithm SNAP Peak Clipping Background")
    self.declareProperty(
        FileProperty(name="NormalizationFilename", defaultValue="",
                     direction=Direction.Input,
                     action=FileAction.OptionalLoad),
        doc="The file containing the processed nexus for normalization.")
    self.declareProperty(
        WorkspaceProperty("NormalizationWorkspace", "", Direction.Input,
                          PropertyMode.Optional),
        "The workspace containing the normalization data.")
    # The two docs below previously duplicated the LiveData description
    # (copy-paste error, including an "Instrumnet" typo); replaced with
    # descriptions matching the actual parameters.
    self.declareProperty(
        "PeakClippingWindowSize", 10,
        "Window size used by the peak clipping background determination")
    self.declareProperty(
        "SmoothingRange", 10,
        "Range used when smoothing the extracted background")
    grouping = ["All", "Column", "Banks", "Modules", "2_4 Grouping"]
    self.declareProperty(
        "GroupDetectorsBy", "All", StringListValidator(grouping),
        "Detector groups to use for future focussing: " +
        "All detectors as one group, Groups (East,West for " +
        "SNAP), Columns for SNAP, detector banks")
    mode = ["Set-Up", "Production"]
    self.declareProperty(
        "ProcessingMode", "Production", StringListValidator(mode),
        "Set-Up Mode is used for establishing correct parameters. Production " +
        "Mode only Normalized workspace is kept for each run.")
    self.declareProperty(
        name="OptionalPrefix", defaultValue="",
        direction=Direction.Input,
        doc="Optional Prefix to be added to workspaces and output filenames")
    self.declareProperty(
        "SaveData", False,
        "Save data in the following formats: Ascii- " +
        "d-spacing ,Nexus Processed,GSAS and Fullprof")
    self.declareProperty(FileProperty(name="OutputDirectory",
                                      defaultValue="",
                                      action=FileAction.OptionalDirectory),
                         doc='Default value is proposal shared directory')
def PyInit(self):
    """Initialize the input and output properties of the algorithm."""
    # Shared validators for the integer properties below.
    nonnegativeInt = IntBoundedValidator(lower=0)
    wsIndexRange = IntBoundedValidator(lower=0, upper=255)
    nonnegativeIntArray = IntArrayBoundedValidator(lower=0)
    # At most two non-negative integers (lower/upper half widths).
    maxTwoNonnegativeInts = CompositeValidator()
    maxTwoNonnegativeInts.add(IntArrayLengthValidator(lenmin=0, lenmax=2))
    maxTwoNonnegativeInts.add(nonnegativeIntArray)
    self.declareProperty(MultipleFileProperty(
        Prop.RUN,
        action=FileAction.OptionalLoad,
        extensions=['nxs']),
        doc='A list of input run numbers/files.')
    self.declareProperty(
        MatrixWorkspaceProperty(Prop.INPUT_WS,
                                defaultValue='',
                                direction=Direction.Input,
                                validator=WorkspaceUnitValidator('TOF'),
                                optional=PropertyMode.Optional),
        doc='An input workspace (units TOF) if no Run is specified.')
    self.declareProperty(
        MatrixWorkspaceProperty(Prop.OUTPUT_WS,
                                defaultValue='',
                                direction=Direction.Output),
        doc='The preprocessed output workspace (unit wavelength), single histogram.')
    self.declareProperty(
        Prop.TWO_THETA,
        defaultValue=Property.EMPTY_DBL,
        doc='A user-defined scattering angle 2 theta (unit degrees).')
    self.declareProperty(
        name=Prop.LINE_POSITION,
        defaultValue=Property.EMPTY_DBL,
        doc='A workspace index corresponding to the beam centre between 0.0 and 255.0.')
    self.declareProperty(MatrixWorkspaceProperty(
        Prop.DIRECT_LINE_WORKSPACE,
        defaultValue='',
        direction=Direction.Input,
        optional=PropertyMode.Optional),
        doc='A pre-processed direct beam workspace.')
    self.declareProperty(Prop.SUBALG_LOGGING,
                         defaultValue=SubalgLogging.OFF,
                         validator=StringListValidator(
                             [SubalgLogging.OFF, SubalgLogging.ON]),
                         doc='Enable or disable child algorithm logging.')
    self.declareProperty(
        Prop.CLEANUP,
        defaultValue=utils.Cleanup.ON,
        validator=StringListValidator(
            [utils.Cleanup.ON, utils.Cleanup.OFF]),
        doc='Enable or disable intermediate workspace cleanup.')
    self.declareProperty(MatrixWorkspaceProperty(
        Prop.WATER_REFERENCE,
        defaultValue='',
        direction=Direction.Input,
        validator=WorkspaceUnitValidator("TOF"),
        optional=PropertyMode.Optional),
        doc='A (water) calibration workspace (unit TOF).')
    self.declareProperty(Prop.SLIT_NORM,
                         defaultValue=SlitNorm.OFF,
                         validator=StringListValidator(
                             [SlitNorm.OFF, SlitNorm.ON]),
                         doc='Enable or disable slit normalisation.')
    self.declareProperty(Prop.FLUX_NORM_METHOD,
                         defaultValue=FluxNormMethod.TIME,
                         validator=StringListValidator([
                             FluxNormMethod.TIME,
                             FluxNormMethod.MONITOR,
                             FluxNormMethod.OFF
                         ]),
                         doc='Neutron flux normalisation method.')
    self.declareProperty(
        IntArrayProperty(Prop.FOREGROUND_HALF_WIDTH,
                         validator=maxTwoNonnegativeInts),
        doc='Number of foreground pixels at lower and higher angles from the centre pixel.')
    self.declareProperty(
        Prop.BKG_METHOD,
        defaultValue=BkgMethod.CONSTANT,
        validator=StringListValidator(
            [BkgMethod.CONSTANT, BkgMethod.LINEAR, BkgMethod.OFF]),
        doc='Flat background calculation method for background subtraction.')
    # Flat background region geometry (pixels on either side of the
    # foreground centre).
    self.declareProperty(
        Prop.LOW_BKG_OFFSET,
        defaultValue=7,
        validator=nonnegativeInt,
        doc='Distance of flat background region towards smaller detector angles from the ' +
            'foreground centre, in pixels.')
    self.declareProperty(
        Prop.LOW_BKG_WIDTH,
        defaultValue=5,
        validator=nonnegativeInt,
        doc='Width of flat background region towards smaller detector angles from the ' +
            'foreground centre, in pixels.')
    self.declareProperty(
        Prop.HIGH_BKG_OFFSET,
        defaultValue=7,
        validator=nonnegativeInt,
        doc='Distance of flat background region towards larger detector angles from the ' +
            'foreground centre, in pixels.')
    self.declareProperty(
        Prop.HIGH_BKG_WIDTH,
        defaultValue=5,
        validator=nonnegativeInt,
        doc='Width of flat background region towards larger detector angles from the ' +
            'foreground centre, in pixels.')
    # Peak-fitting window.
    self.declareProperty(
        Prop.START_WS_INDEX,
        validator=wsIndexRange,
        defaultValue=0,
        doc='Start workspace index used for peak fitting.')
    self.declareProperty(Prop.END_WS_INDEX,
                         validator=wsIndexRange,
                         defaultValue=255,
                         doc='Last workspace index used for peak fitting.')
    self.declareProperty(
        Prop.XMIN,
        defaultValue=Property.EMPTY_DBL,
        doc='Minimum x value (unit Angstrom) used for peak fitting.')
    self.declareProperty(
        Prop.XMAX,
        defaultValue=Property.EMPTY_DBL,
        doc='Maximum x value (unit Angstrom) used for peak fitting.')
def PyInit(self) -> None:
    """Declare the algorithm's input/output properties and arrange them into GUI groups."""
    # Reusable validators for the numeric properties declared below.
    positiveFloat = FloatBoundedValidator(0., exclusive=False)
    validRebinParams = RebinParamsValidator(AllowEmpty=True)
    orderedPairsValidator = FloatArrayOrderedPairsValidator()
    # --- Main input/output ---
    self.declareProperty(WorkspaceGroupProperty('OutputWorkspace', '',
                                                direction=Direction.Output),
                         doc='The output workspace group containing reduced data.')
    self.declareProperty(MultipleFileProperty('Runs', action=FileAction.Load,
                                              extensions=['nxs']),
                         doc='Run(s) to be processed.')
    processes = ['Cadmium', 'Empty', 'Vanadium', 'Sample']
    self.declareProperty(name='ProcessAs', defaultValue='Sample',
                         validator=StringListValidator(processes),
                         doc='Choose the process type.')
    reduction_options = ['Powder', 'SingleCrystal']
    self.declareProperty(name='ReductionType', defaultValue='Powder',
                         validator=StringListValidator(reduction_options),
                         doc='Choose the appropriate reduction type for the data to process.')
    # --- Correction inputs (grouped under 'Corrections' below) ---
    self.declareProperty('VanadiumWorkspace', '',
                         doc='File(s) or workspaces containing vanadium data.')
    self.declareProperty('EmptyContainerWorkspace', '',
                         doc='Empty container workspace.')
    self.declareProperty('EmptyContainerScaling', 1.0,
                         doc='Scaling factor for the empty container.')
    self.declareProperty(WorkspaceGroupProperty('CadmiumWorkspace', '',
                                                direction=Direction.Input,
                                                optional=PropertyMode.Optional),
                         doc='Cadmium absorber workspace.')
    # Flat-background properties are shared with DirectILLCollectData, so copy their
    # declarations from there instead of redeclaring.
    self.copyProperties('DirectILLCollectData',
                        [common.PROP_FLAT_BKG, common.PROP_FLAT_BKG_WINDOW])
    self.declareProperty('FlatBackgroundSource', "",
                         doc='File(s) or workspaces containing the source to calculate flat background.')
    self.copyProperties('DirectILLCollectData',
                        [common.PROP_FLAT_BKG_SCALING, common.PROP_OUTPUT_FLAT_BKG_WS])
    self.copyProperties('DirectILLReduction', common.PROP_ABSOLUTE_UNITS)
    # Assign all correction-related properties to one GUI group.
    additional_inputs_group = 'Corrections'
    self.setPropertyGroup('VanadiumWorkspace', additional_inputs_group)
    self.setPropertyGroup('EmptyContainerWorkspace', additional_inputs_group)
    self.setPropertyGroup('EmptyContainerScaling', additional_inputs_group)
    self.setPropertyGroup('CadmiumWorkspace', additional_inputs_group)
    self.setPropertyGroup(common.PROP_FLAT_BKG, additional_inputs_group)
    self.setPropertyGroup(common.PROP_FLAT_BKG_WINDOW, additional_inputs_group)
    self.setPropertyGroup('FlatBackgroundSource', additional_inputs_group)
    self.setPropertyGroup(common.PROP_FLAT_BKG_SCALING, additional_inputs_group)
    self.setPropertyGroup(common.PROP_OUTPUT_FLAT_BKG_WS, additional_inputs_group)
    self.setPropertyGroup(common.PROP_ABSOLUTE_UNITS, additional_inputs_group)
    # --- Calibration ---
    self.copyProperties('DirectILLCollectData',
                        [common.PROP_NORMALISATION, common.PROP_MON_PEAK_SIGMA_MULTIPLIER])
    self.copyProperties('DirectILLCollectData', common.PROP_INCIDENT_ENERGY_CALIBRATION)
    self.declareProperty(name='IncidentEnergy', defaultValue=0.0,
                         validator=positiveFloat,
                         doc='Value for the calibrated incident energy (meV).')
    self.copyProperties('DirectILLCollectData',
                        [common.PROP_ELASTIC_CHANNEL_MODE, common.PROP_EPP_METHOD])
    self.declareProperty(name='ElasticChannelIndex', defaultValue=0.0,
                         validator=positiveFloat,
                         doc='Bin index value for the centre of the elastic peak. Can be a float.')
    self.declareProperty('SampleAngleOffset', 0.0,
                         doc='Value for the offset parameter in omega scan (degrees).')
    calibration_group = 'Calibration'
    self.setPropertyGroup(common.PROP_INCIDENT_ENERGY_CALIBRATION, calibration_group)
    self.setPropertyGroup('IncidentEnergy', calibration_group)
    self.setPropertyGroup('ElasticChannelIndex', calibration_group)
    self.setPropertyGroup(common.PROP_ELASTIC_CHANNEL_MODE, calibration_group)
    self.setPropertyGroup(common.PROP_EPP_METHOD, calibration_group)
    self.setPropertyGroup('SampleAngleOffset', calibration_group)
    # --- Masking ---
    # The mask workspace replaces MaskWorkspace parameter from PantherSingle and DiagnosticsWorkspace from directred
    self.declareProperty('MaskWorkspace', '',
                         doc='File(s) or workspaces containing the mask.')
    self.declareProperty(IntArrayProperty(name='MaskedTubes', values=[],
                                          direction=Direction.Input),
                         doc='List of tubes to be masked.')
    self.declareProperty('MaskThresholdMin', 0.0,
                         doc='Threshold level below which bins will be masked'
                             ' to remove empty / background pixels.')
    self.declareProperty('MaskThresholdMax', 0.0,
                         doc='Threshold level above which bins will be masked'
                             ' to remove noisy pixels.')
    self.declareProperty(FloatArrayProperty(name='MaskedAngles', values=[],
                                            validator=orderedPairsValidator),
                         doc='Mask detectors in the given angular range.')
    self.declareProperty('MaskWithVanadium', True,
                         doc='Whether to mask using vanadium diagnostics workspace.')
    masking_group_name = 'Masking'
    self.setPropertyGroup('MaskWorkspace', masking_group_name)
    self.setPropertyGroup('MaskedTubes', masking_group_name)
    self.setPropertyGroup('MaskThresholdMin', masking_group_name)
    self.setPropertyGroup('MaskThresholdMax', masking_group_name)
    self.setPropertyGroup('MaskedAngles', masking_group_name)
    self.setPropertyGroup('MaskWithVanadium', masking_group_name)
    # --- Binning ---
    self.copyProperties('DirectILLReduction',
                        [common.PROP_REBINNING_W, common.PROP_REBINNING_PARAMS_W])
    self.declareProperty(FloatArrayProperty(name='MomentumTransferBinning',
                                            validator=validRebinParams),
                         doc='Momentum transfer binning parameters.')
    rebinning_group = 'Binning parameters'
    self.setPropertyGroup(common.PROP_REBINNING_W, rebinning_group)
    self.setPropertyGroup(common.PROP_REBINNING_PARAMS_W, rebinning_group)
    self.setPropertyGroup('MomentumTransferBinning', rebinning_group)
    # --- Sample attenuation ---
    self.declareProperty(name='AbsorptionCorrection', defaultValue='None',
                         validator=StringListValidator(['None', 'Fast', 'Full']),
                         doc='Choice of approach to absorption correction.')
    self.declareProperty(name='SelfAttenuationMethod', defaultValue='MonteCarlo',
                         validator=StringListValidator(['Numerical', 'MonteCarlo']),
                         doc='Choice of calculation method for the attenuation calculation.')
    self.declareProperty(PropertyManagerProperty('SampleMaterial'),
                         doc='Sample material definitions.')
    self.declareProperty(PropertyManagerProperty('SampleGeometry'),
                         doc="Dictionary for the sample geometry.")
    self.declareProperty(PropertyManagerProperty('ContainerMaterial'),
                         doc='Container material definitions.')
    self.declareProperty(PropertyManagerProperty('ContainerGeometry'),
                         doc="Dictionary for the container geometry.")
    attenuation_group = 'Sample attenuation'
    self.setPropertyGroup('AbsorptionCorrection', attenuation_group)
    self.setPropertyGroup('SelfAttenuationMethod', attenuation_group)
    self.setPropertyGroup('SampleMaterial', attenuation_group)
    self.setPropertyGroup('SampleGeometry', attenuation_group)
    self.setPropertyGroup('ContainerMaterial', attenuation_group)
    self.setPropertyGroup('ContainerGeometry', attenuation_group)
    # --- Detector grouping ---
    self.declareProperty(name=common.PROP_DET_GROUPING, defaultValue="",
                         doc='Grouping pattern to reduce the granularity of the output.')
    self.declareProperty(name=common.PROP_DET_GROUPING_BY, defaultValue=1,
                         doc='Step to use when grouping detectors to reduce the granularity of the output.')
    self.copyProperties('DirectILLCollectData',
                        [common.PROP_DET_HOR_GROUPING, common.PROP_DET_VER_GROUPING])
    self.declareProperty(name="ApplyGroupingBy", defaultValue=False,
                         doc='Whether to apply the pixel grouping horizontally or vertically to the data, and not'
                             ' only to increase the statistics of the flat background calculation.')
    self.declareProperty(name=common.PROP_GROUPING_ANGLE_STEP, defaultValue=0.0,
                         validator=positiveFloat,
                         doc='A scattering angle step to which to group detectors, in degrees.')
    self.declareProperty(name='GroupingBehaviour', defaultValue="Sum",
                         validator=StringListValidator(['Sum', 'Average']),
                         doc='Defines which behaviour should be used when grouping pixels.')
    grouping_options_group = 'Grouping options'
    self.setPropertyGroup(common.PROP_DET_GROUPING, grouping_options_group)
    self.setPropertyGroup(common.PROP_DET_GROUPING_BY, grouping_options_group)
    self.setPropertyGroup(common.PROP_DET_HOR_GROUPING, grouping_options_group)
    self.setPropertyGroup(common.PROP_DET_VER_GROUPING, grouping_options_group)
    self.setPropertyGroup('ApplyGroupingBy', grouping_options_group)
    self.setPropertyGroup(common.PROP_GROUPING_ANGLE_STEP, grouping_options_group)
    self.setPropertyGroup('GroupingBehaviour', grouping_options_group)
    # --- Miscellaneous switches ---
    self.declareProperty(name="SaveOutput", defaultValue=True,
                         doc="Whether to save the output directly after processing.")
    self.declareProperty(name='ClearCache', defaultValue=False,
                         doc='Whether to clear intermediate workspaces.')
def PyInit(self) -> None:
    """Declare input, instrument-configuration, grouping and output properties."""
    # Input properties
    self.declareProperty(StringArrayProperty(name='InputFiles'),
                         doc='Comma separated list of input files')
    self.declareProperty(name='SumFiles', defaultValue=False,
                         doc='Toggle input file summing or sequential processing')
    self.declareProperty(name='LoadLogFiles', defaultValue=True,
                         doc='Load log files when loading runs')
    self.declareProperty(WorkspaceProperty('CalibrationWorkspace', '',
                                           direction=Direction.Input,
                                           optional=PropertyMode.Optional),
                         doc='Workspace containing calibration data')
    # Instrument configuration properties
    self.declareProperty(name='Instrument', defaultValue='',
                         validator=StringListValidator(['IRIS', 'OSIRIS', 'TOSCA', 'TFXA']),
                         doc='Instrument used during run.')
    self.declareProperty(name='Analyser', defaultValue='',
                         validator=StringListValidator(['graphite', 'mica', 'fmica']),
                         doc='Analyser bank used during run.')
    self.declareProperty(name='Reflection', defaultValue='',
                         validator=StringListValidator(['002', '004', '006']),
                         doc='Reflection number for instrument setup during run.')
    self.declareProperty(name='Efixed', defaultValue=Property.EMPTY_DBL,
                         validator=FloatBoundedValidator(0.0),
                         doc='Overrides the default Efixed value for the analyser/reflection selection.')
    # SpectraRange is mandatory; the [0, 1] default is a placeholder the validator forces
    # the caller to replace — NOTE(review): confirm against algorithm usage.
    self.declareProperty(IntArrayProperty(name='SpectraRange', values=[0, 1],
                                          validator=IntArrayMandatoryValidator()),
                         doc='Comma separated range of spectra number to use.')
    self.declareProperty(FloatArrayProperty(name='BackgroundRange'),
                         doc='Range of background to subtract from raw data in time of flight.')
    self.declareProperty(name='RebinString', defaultValue='',
                         doc='Rebin string parameters.')
    self.declareProperty(name='DetailedBalance', defaultValue=Property.EMPTY_DBL,
                         doc='')
    self.declareProperty(name='ScaleFactor', defaultValue=1.0,
                         doc='Factor by which to scale result.')
    self.declareProperty(name='FoldMultipleFrames', defaultValue=True,
                         doc='Folds multiple framed data sets into a single workspace.')
    # Spectra grouping options
    self.declareProperty(name='GroupingMethod', defaultValue='IPF',
                         validator=StringListValidator(['Individual', 'All', 'File', 'Workspace', 'IPF', 'Custom']),
                         doc='Method used to group spectra.')
    self.declareProperty(WorkspaceProperty('GroupingWorkspace', '',
                                           direction=Direction.Input,
                                           optional=PropertyMode.Optional),
                         doc='Workspace containing spectra grouping.')
    self.declareProperty(name='GroupingString', defaultValue='',
                         direction=Direction.Input,
                         doc='Spectra to group as string')
    self.declareProperty(FileProperty('MapFile', '',
                                      action=FileAction.OptionalLoad,
                                      extensions=['.map']),
                         doc='Workspace containing spectra grouping.')
    # Output properties
    self.declareProperty(name='UnitX', defaultValue='DeltaE',
                         validator=StringListValidator(['DeltaE', 'DeltaE_inWavenumber']),
                         doc='X axis units for the result workspace.')
    self.declareProperty(WorkspaceGroupProperty('OutputWorkspace', '',
                                                direction=Direction.Output),
                         doc='Workspace group for the resulting workspaces.')
def PyInit(self) -> None:
    """Initialize the algorithm's input and output properties."""
    # GUI group names used by the setPropertyGroup calls below.
    PROPGROUP_BEAM_STOP_DIAGNOSTICS = 'Beam Stop Diagnostics'
    PROPGROUP_BKG_DIAGNOSTICS = 'Background Diagnostics'
    PROPGROUP_PEAK_DIAGNOSTICS = 'Elastic Peak Diagnostics'
    PROPGROUP_USER_MASK = 'Additional Masking'
    # Reusable validators.
    greaterThanUnityFloat = FloatBoundedValidator(lower=1)
    # Input workspace must carry an instrument and be in TOF units.
    inputWorkspaceValidator = CompositeValidator()
    inputWorkspaceValidator.add(InstrumentValidator())
    inputWorkspaceValidator.add(WorkspaceUnitValidator('TOF'))
    positiveFloat = FloatBoundedValidator(lower=0)
    positiveIntArray = IntArrayBoundedValidator()
    positiveIntArray.setLower(0)
    scalingFactor = FloatBoundedValidator(lower=0, upper=1)
    # Properties.
    self.declareProperty(MatrixWorkspaceProperty(name=common.PROP_INPUT_WS,
                                                 defaultValue='',
                                                 validator=inputWorkspaceValidator,
                                                 direction=Direction.Input),
                         doc="A 'raw' workspace from DirectILLCollectData to calculate the diagnostics from.")
    self.declareProperty(WorkspaceProperty(name=common.PROP_OUTPUT_WS,
                                           defaultValue='',
                                           direction=Direction.Output),
                         doc='A diagnostics mask workspace.')
    self.declareProperty(name=common.PROP_CLEANUP_MODE,
                         defaultValue=common.CLEANUP_ON,
                         validator=StringListValidator([common.CLEANUP_ON, common.CLEANUP_OFF]),
                         direction=Direction.Input,
                         doc='What to do with intermediate workspaces.')
    self.declareProperty(name=common.PROP_SUBALG_LOGGING,
                         defaultValue=common.SUBALG_LOGGING_OFF,
                         validator=StringListValidator([common.SUBALG_LOGGING_OFF, common.SUBALG_LOGGING_ON]),
                         direction=Direction.Input,
                         doc='Enable or disable subalgorithms to ' + 'print in the logs.')
    self.declareProperty(ITableWorkspaceProperty(name=common.PROP_EPP_WS,
                                                 defaultValue='',
                                                 direction=Direction.Input,
                                                 optional=PropertyMode.Optional),
                         doc='Table workspace containing results from the FindEPP algorithm.')
    # --- Elastic peak diagnostics ---
    self.declareProperty(name=common.PROP_ELASTIC_PEAK_DIAGNOSTICS,
                         defaultValue=common.ELASTIC_PEAK_DIAGNOSTICS_AUTO,
                         validator=StringListValidator([common.ELASTIC_PEAK_DIAGNOSTICS_AUTO,
                                                        common.ELASTIC_PEAK_DIAGNOSTICS_ON,
                                                        common.ELASTIC_PEAK_DIAGNOSTICS_OFF]),
                         direction=Direction.Input,
                         doc='Enable or disable elastic peak diagnostics.')
    self.setPropertyGroup(common.PROP_ELASTIC_PEAK_DIAGNOSTICS, PROPGROUP_PEAK_DIAGNOSTICS)
    self.declareProperty(name=common.PROP_ELASTIC_PEAK_SIGMA_MULTIPLIER,
                         defaultValue=3.0,
                         validator=positiveFloat,
                         direction=Direction.Input,
                         doc="Integration half width of the elastic peak in multiples " + " of 'Sigma' in the EPP table.")
    self.setPropertyGroup(common.PROP_ELASTIC_PEAK_SIGMA_MULTIPLIER, PROPGROUP_PEAK_DIAGNOSTICS)
    self.declareProperty(name=common.PROP_PEAK_DIAGNOSTICS_LOW_THRESHOLD,
                         defaultValue=Property.EMPTY_DBL,
                         validator=positiveFloat,
                         direction=Direction.Input,
                         doc='Multiplier for lower acceptance limit ' + 'used in elastic peak diagnostics.')
    self.setPropertyGroup(common.PROP_PEAK_DIAGNOSTICS_LOW_THRESHOLD, PROPGROUP_PEAK_DIAGNOSTICS)
    self.declareProperty(name=common.PROP_PEAK_DIAGNOSTICS_HIGH_THRESHOLD,
                         defaultValue=Property.EMPTY_DBL,
                         validator=greaterThanUnityFloat,
                         direction=Direction.Input,
                         doc='Multiplier for higher acceptance limit ' + 'used in elastic peak diagnostics.')
    self.setPropertyGroup(common.PROP_PEAK_DIAGNOSTICS_HIGH_THRESHOLD, PROPGROUP_PEAK_DIAGNOSTICS)
    self.declareProperty(name=common.PROP_PEAK_DIAGNOSTICS_SIGNIFICANCE_TEST,
                         defaultValue=Property.EMPTY_DBL,
                         validator=positiveFloat,
                         direction=Direction.Input,
                         doc='To fail the elastic peak diagnostics, the intensity must also exceed ' + 'this number of error bars with respect to the median intensity.')
    self.setPropertyGroup(common.PROP_PEAK_DIAGNOSTICS_SIGNIFICANCE_TEST, PROPGROUP_PEAK_DIAGNOSTICS)
    # --- Background diagnostics ---
    self.declareProperty(name=common.PROP_BKG_DIAGNOSTICS,
                         defaultValue=common.BKG_DIAGNOSTICS_AUTO,
                         validator=StringListValidator([common.BKG_DIAGNOSTICS_AUTO,
                                                        common.BKG_DIAGNOSTICS_ON,
                                                        common.BKG_DIAGNOSTICS_OFF]),
                         direction=Direction.Input,
                         doc='Control the background diagnostics.')
    self.setPropertyGroup(common.PROP_BKG_DIAGNOSTICS, PROPGROUP_BKG_DIAGNOSTICS)
    self.declareProperty(name=common.PROP_BKG_SIGMA_MULTIPLIER,
                         defaultValue=10.0,
                         validator=positiveFloat,
                         direction=Direction.Input,
                         doc="Width of the range excluded from background integration around " + "the elastic peaks in multiplies of 'Sigma' in the EPP table")
    self.setPropertyGroup(common.PROP_BKG_SIGMA_MULTIPLIER, PROPGROUP_BKG_DIAGNOSTICS)
    self.declareProperty(name=common.PROP_BKG_DIAGNOSTICS_LOW_THRESHOLD,
                         defaultValue=Property.EMPTY_DBL,
                         validator=positiveFloat,
                         direction=Direction.Input,
                         doc='Multiplier for lower acceptance limit ' + 'used in noisy background diagnostics.')
    self.setPropertyGroup(common.PROP_BKG_DIAGNOSTICS_LOW_THRESHOLD, PROPGROUP_BKG_DIAGNOSTICS)
    self.declareProperty(name=common.PROP_BKG_DIAGNOSTICS_HIGH_THRESHOLD,
                         defaultValue=Property.EMPTY_DBL,
                         validator=greaterThanUnityFloat,
                         direction=Direction.Input,
                         doc='Multiplier for higher acceptance limit ' + 'used in noisy background diagnostics.')
    self.setPropertyGroup(common.PROP_BKG_DIAGNOSTICS_HIGH_THRESHOLD, PROPGROUP_BKG_DIAGNOSTICS)
    self.declareProperty(name=common.PROP_BKG_DIAGNOSTICS_SIGNIFICANCE_TEST,
                         defaultValue=Property.EMPTY_DBL,
                         validator=positiveFloat,
                         direction=Direction.Input,
                         doc='To fail the background diagnostics, the background level must also exceed ' + 'this number of error bars with respect to the median level.')
    self.setPropertyGroup(common.PROP_BKG_DIAGNOSTICS_SIGNIFICANCE_TEST, PROPGROUP_BKG_DIAGNOSTICS)
    # --- Beam stop diagnostics ---
    self.declareProperty(name=common.PROP_BEAM_STOP_DIAGNOSTICS,
                         defaultValue=common.BEAM_STOP_DIAGNOSTICS_AUTO,
                         validator=StringListValidator([common.BEAM_STOP_DIAGNOSTICS_AUTO,
                                                        common.BEAM_STOP_DIAGNOSTICS_ON,
                                                        common.BEAM_STOP_DIAGNOSTICS_OFF]),
                         direction=Direction.Input,
                         doc='Control the beam stop diagnostics.')
    self.setPropertyGroup(common.PROP_BEAM_STOP_DIAGNOSTICS, PROPGROUP_BEAM_STOP_DIAGNOSTICS)
    self.declareProperty(name=common.PROP_BEAM_STOP_THRESHOLD,
                         defaultValue=0.67,
                         validator=scalingFactor,
                         direction=Direction.Input,
                         doc='Multiplier for the lower acceptance limit for beam stop diagnostics.')
    self.setPropertyGroup(common.PROP_BEAM_STOP_THRESHOLD, PROPGROUP_BEAM_STOP_DIAGNOSTICS)
    # --- Masking ---
    self.declareProperty(name=common.PROP_DEFAULT_MASK,
                         defaultValue=common.DEFAULT_MASK_ON,
                         validator=StringListValidator([common.DEFAULT_MASK_ON, common.DEFAULT_MASK_OFF]),
                         direction=Direction.Input,
                         doc='Enable or disable instrument specific default mask.')
    self.declareProperty(IntArrayProperty(name=common.PROP_USER_MASK,
                                          values='',
                                          validator=positiveIntArray,
                                          direction=Direction.Input),
                         doc='List of spectra to mask.')
    self.setPropertyGroup(common.PROP_USER_MASK, PROPGROUP_USER_MASK)
    self.declareProperty(StringArrayProperty(name=common.PROP_USER_MASK_COMPONENTS,
                                             values='',
                                             direction=Direction.Input),
                         doc='List of instrument components to mask.')
    self.setPropertyGroup(common.PROP_USER_MASK_COMPONENTS, PROPGROUP_USER_MASK)
    # Rest of the output properties
    self.declareProperty(ITableWorkspaceProperty(name=common.PROP_OUTPUT_DIAGNOSTICS_REPORT_WS,
                                                 defaultValue='',
                                                 direction=Direction.Output,
                                                 optional=PropertyMode.Optional),
                         doc='Output table workspace for detector diagnostics reporting.')
    self.setPropertyGroup(common.PROP_OUTPUT_DIAGNOSTICS_REPORT_WS, common.PROPGROUP_OPTIONAL_OUTPUT)
    self.declareProperty(name=common.PROP_OUTPUT_DIAGNOSTICS_REPORT,
                         defaultValue='',
                         direction=Direction.Output,
                         doc='Diagnostics report as a string.')
    self.setPropertyGroup(common.PROP_OUTPUT_DIAGNOSTICS_REPORT, common.PROPGROUP_OPTIONAL_OUTPUT)
def create_spectrum_list_from_string(str_list):
    """Convert a spectrum-list string into a list of plain Python ints.

    A throwaway IntArrayProperty is used purely for its array-string parsing;
    each parsed element is then converted from the property's native integer
    type (int32) to a built-in int.
    """
    parsed_values = IntArrayProperty('var', str_list).value
    return [int(value) for value in parsed_values]