Beispiel #1
0
    def makeFiles(self):
        """Create the three reference workspaces used by the DakotaChiSquared
        tests, save them as NeXus files in the default save directory, record
        the file paths and drop the in-memory workspaces again."""
        # (workspace name, DataX, DataY, DataE) for each workspace to create;
        # 'simwrong' has one X value fewer than 'data' on purpose.
        workspace_specs = [
            ('data', '1,2,3,4,5', '1,0,1,4,4', '1,0,1,2,2'),
            ('sim', '1,2,3,4,5', '1,1,1,1,1', '0,0,0,0,0'),
            ('simwrong', '1,2,3,4', '1,1,1,1', '0,0,0,0'),
        ]
        for ws_name, xs, ys, es in workspace_specs:
            simpleapi.CreateWorkspace(OutputWorkspace=ws_name,
                                      DataX=xs,
                                      DataY=ys,
                                      DataE=es)

        save_dir = config.getString('defaultsave.directory')
        self.datafile = os.path.join(save_dir, 'DakotaChiSquared_data.nxs')
        self.simfile = os.path.join(save_dir, 'DakotaChiSquared_sim.nxs')
        self.simwrongfile = os.path.join(save_dir, 'DakotaChiSquared_simwrong.nxs')
        # chi-squared output path (file is produced later by the test itself)
        self.chifile = os.path.join(save_dir, 'DakotaChiSquared_chi.txt')

        ads = AnalysisDataServiceImpl.Instance()
        for ws_name, target in (('data', self.datafile),
                                ('sim', self.simfile),
                                ('simwrong', self.simwrongfile)):
            simpleapi.SaveNexus(ws_name, target)
            # remove the in-memory copy; tests reload from the saved file
            ads.remove(ws_name)
def iliad_maps_setup():
    """Prepare the environment for a MAPS reduction run.

    Ensures a default save directory is configured (falling back to the
    current working directory when Mantid has none set) and appends the MAPS
    instrument-file and run-data locations to the data search path.
    """
    # where to save results (usually specified in Mantid, data search directories)
    save_dir = config.getString('defaultsave.directory')
    if not save_dir:
        config['defaultsave.directory'] = os.getcwd()
        save_dir = config.getString('defaultsave.directory')

    # print() function instead of the Python 2 print statement; indentation
    # normalised to spaces (the original mixed tabs and spaces)
    print("Data will be saved into: ", save_dir)
    # map mask and cal file, again the values from Mantid, data search directories can be modified here
    config.appendDataSearchDir('/home/maps/mprogs/InstrumentFiles/maps')
    # data (raw or nxs) run files -- values from data search directories can be modified here
    config.appendDataSearchDir('/isisdatar55/NDXMAPS/Instrument/data/cycle_14_2')
Beispiel #3
0
class StringToPngTest(unittest.TestCase):
    """Test that the StringToPng algorithm renders a string to a PNG file."""

    # Output image written by the algorithm; removed again by cleanup().
    plotfile = os.path.join(config.getString('defaultsave.directory'),
                            "StringToPngTest.png")

    def cleanup(self):
        """Delete the plot file if it exists."""
        if os.path.exists(self.plotfile):
            os.remove(self.plotfile)

    def testPlot(self):
        """Render a two-line string and check a non-trivial PNG was written."""
        to_plot = 'This is a string\nAnd this is a second line'
        ok2run = ''
        try:
            import matplotlib
            from distutils.version import LooseVersion
            if LooseVersion(matplotlib.__version__) < LooseVersion("1.2.0"):
                ok2run = 'Wrong version of matplotlib. Required >= 1.2.0'
            else:
                matplotlib.use("agg")
                import matplotlib.pyplot as plt
        # narrowed from a bare 'except:' which would also swallow
        # SystemExit and KeyboardInterrupt
        except Exception:
            ok2run = 'Problem importing matplotlib'
        if ok2run == '':
            simpleapi.StringToPng(String=to_plot, OutputFilename=self.plotfile)
            # a real rendered PNG should be comfortably over 1 kB
            self.assertGreater(os.path.getsize(self.plotfile), 1e3)
        self.cleanup()
    def _load_single_file(self, filename, output_ws):
        """Load one run file into *output_ws* and prepare it for reduction.

        Applies BASIS-specific TOF correction and masking, loads an optional
        parameter file, chops long frames into multiple workspaces when
        required and, for each resulting workspace, extracts the monitor
        spectrum and crops the detector range.

        @param filename Path of the file to load
        @param output_ws Name of the workspace to load into
        @raise ValueError If the configured detector range is invalid
        """
        logger.notice("Loading file %s" % filename)

        self._load_data(filename, output_ws)

        # isinstance (not an exact type() check) so workspace subclasses count
        # as event data too -- consistent with the check in the loop below
        if isinstance(mtd[output_ws], IEventWorkspace):
            self._contains_event_data = True

        inst_name = mtd[output_ws].getInstrument().getName()
        if inst_name == 'BASIS':
            ModeratorTzeroLinear(InputWorkspace=output_ws, OutputWorkspace=output_ws)
            basis_mask = mtd[output_ws].getInstrument().getStringParameter(
                'Workflow.MaskFile')[0]
            # Quick hack for older BASIS files that only have one side
            #if (mtd[file].getRun()['run_number'] < 16693):
            #        basis_mask = "BASIS_Mask_before_16693.xml"
            basis_mask_filename = os.path.join(
                config.getString('maskFiles.directory'), basis_mask)
            if os.path.isfile(basis_mask_filename):
                LoadMask(Instrument="BASIS", OutputWorkspace="__basis_mask",
                         InputFile=basis_mask_filename)
                MaskDetectors(Workspace=output_ws, MaskedWorkspace="__basis_mask")
            else:
                logger.notice("Couldn't find specified mask file : " + str(basis_mask_filename))

        # identity test for None instead of '!= None'
        if self._parameter_file is not None:
            LoadParameterFile(Workspace=output_ws, Filename=self._parameter_file)

        self._monitor_index = self._reducer._get_monitor_index(mtd[output_ws])

        if self._require_chop_data(output_ws):
            ChopData(InputWorkspace=output_ws, OutputWorkspace=output_ws,
                     Step=20000.0, NChops=5,
                     IntegrationRangeLower=5000.0,
                     IntegrationRangeUpper=10000.0,
                     MonitorWorkspaceIndex=self._monitor_index)
            self._multiple_frames = True
        else:
            self._multiple_frames = False

        # Chopping produces a workspace group; otherwise work on the single one
        if self._multiple_frames:
            workspaces = mtd[output_ws].getNames()
        else:
            workspaces = [output_ws]

        logger.debug('self._monitor_index = ' + str(self._monitor_index))

        for ws in workspaces:
            if isinstance(mtd[ws], mantid.api.IEventWorkspace):
                LoadNexusMonitors(Filename=self._data_files[output_ws],
                                  OutputWorkspace=ws + '_mon')
            else:
                ## Extract Monitor Spectrum
                ExtractSingleSpectrum(InputWorkspace=ws, OutputWorkspace=ws + '_mon',
                                      WorkspaceIndex=self._monitor_index)

                if self._detector_range_start < 0 or self._detector_range_end > mtd[ws].getNumberHistograms():
                    raise ValueError("Range %d - %d is not a valid detector range." % (self._detector_range_start, self._detector_range_end))

                ## Crop the workspace to remove uninteresting detectors
                CropWorkspace(InputWorkspace=ws, OutputWorkspace=ws,
                              StartWorkspaceIndex=self._detector_range_start,
                              EndWorkspaceIndex=self._detector_range_end)
Beispiel #5
0
    def _group_data(self, workspace):
        """Group detector spectra in *workspace* according to the configured
        grouping policy: 'Individual' (or None) leaves it untouched, 'All'
        averages every unmasked spectrum into one, anything else is treated
        as a grouping map-file name. Returns the workspace name."""
        grouping = self._grouping_policy
        if ( grouping == 'Individual' ) or ( grouping is None ):
            return workspace
        elif ( grouping == 'All' ):
            # Collect every workspace index that is not masked and average
            # them into a single group.
            nhist = mtd[workspace].getNumberHistograms()
            wslist = []
            for i in range(0, nhist):
                if i not in self._masking_detectors:
                    wslist.append(i)
            GroupDetectors(InputWorkspace=workspace,OutputWorkspace= workspace, 
                WorkspaceIndexList=wslist, Behaviour='Average')
        else:
            # Assume we have a grouping file.
            # First lets, find the file...
            if (os.path.isfile(grouping)):
                grouping_filename = grouping
            else:
                # Not a valid path on its own: look in the configured
                # grouping-files directory instead.
                grouping_filename = os.path.join(config.getString('groupingFiles.directory'),
                        grouping)

            #mask detectors before grouping if we need to
            if len(self._masking_detectors) > 0:
                MaskDetectors(workspace, WorkspaceIndexList=self._masking_detectors)

            # Final check that the Mapfile exists, if not don't run the alg.
            # NOTE(review): a missing map file silently skips grouping here;
            # consider logging a notice (see the sibling _group_data variant).
            if os.path.isfile(grouping_filename):
                GroupDetectors(InputWorkspace=workspace,OutputWorkspace=workspace, MapFile=grouping_filename, 
                        Behaviour='Average')
        return workspace
 def _save_output(self, workspace, mismatch_name):
     """Save *workspace* to a NeXus file called *mismatch_name* in the
     default save directory."""
     out_path = path.join(config.getString('defaultsave.directory'),
                          mismatch_name)
     saver_options = {"Filename": out_path, "InputWorkspace": workspace}
     nexus_saver = create_unmanaged_algorithm("SaveNexus", **saver_options)
     nexus_saver.execute()
 def makeWs(self):
     """Create two small two-spectrum workspaces ('test1' in TOF, 'test2' in
     Momentum), group them into 'group' and record the plot output path."""
     # Keyword casing fixed from 'YUnitlabel' to the algorithm's property
     # name 'YUnitLabel' -- consistent with the 'test2' call below.
     simpleapi.CreateWorkspace(OutputWorkspace='test1', DataX='1,2,3,4,5,1,2,3,4,5', DataY='1,2,3,4,2,3,4,5',
                               DataE='1,2,3,4,2,3,4,5', NSpec='2', UnitX='TOF', Distribution='1', YUnitLabel="S(q)")
     simpleapi.CreateWorkspace(OutputWorkspace='test2', DataX='1,2,3,4,5,1,2,3,4,5', DataY='1,2,3,4,2,3,4,5',
                               DataE='1,2,3,4,2,3,4,5', NSpec='2',
                               UnitX='Momentum', VerticalAxisUnit='TOF', VerticalAxisValues='1,2', Distribution='1',
                               YUnitLabel='E', WorkspaceTitle='x')
     simpleapi.GroupWorkspaces("test1,test2", OutputWorkspace="group")
     self.plotfile = os.path.join(config.getString('defaultsave.directory'), 'plot.png')
    def makeFiles(self):
        """Create the reference workspaces ('data', 'sim', 'simwrong'), save
        each to a NeXus file in the default save directory, record the file
        paths (plus the chi-squared output path) and remove the in-memory
        workspaces again."""
        simpleapi.CreateWorkspace(OutputWorkspace='data', DataX='1,2,3,4,5', DataY='1,0,1,4,4', DataE='1,0,1,2,2')
        simpleapi.CreateWorkspace(OutputWorkspace='sim', DataX='1,2,3,4,5', DataY='1,1,1,1,1', DataE='0,0,0,0,0')
        # note: 'simwrong' has one X value fewer than 'data'
        simpleapi.CreateWorkspace(OutputWorkspace='simwrong', DataX='1,2,3,4', DataY='1,1,1,1', DataE='0,0,0,0')

        self.datafile = os.path.join(config.getString('defaultsave.directory'), 'DakotaChiSquared_data.nxs')
        self.simfile = os.path.join(config.getString('defaultsave.directory'), 'DakotaChiSquared_sim.nxs')
        self.simwrongfile = os.path.join(config.getString('defaultsave.directory'), 'DakotaChiSquared_simwrong.nxs')
        # chi-squared output path; the file itself is produced later
        self.chifile = os.path.join(config.getString('defaultsave.directory'), 'DakotaChiSquared_chi.txt')

        simpleapi.SaveNexus('data', self.datafile)
        simpleapi.SaveNexus('sim', self.simfile)
        simpleapi.SaveNexus('simwrong', self.simwrongfile)

        # drop the in-memory copies; tests reload from the saved files
        ads = AnalysisDataServiceImpl.Instance()
        ads.remove("data")
        ads.remove("sim")
        ads.remove("simwrong")
Beispiel #9
0
 def makeWs(self):
     """Create two small two-spectrum workspaces ('test1' in dSpacing,
     'test2' in Momentum), group them into 'group' and record the plot
     output path."""
     # Keyword casing fixed from 'YUnitlabel' to the algorithm's property
     # name 'YUnitLabel' -- consistent with the 'test2' call below.
     simpleapi.CreateWorkspace(OutputWorkspace='test1', DataX='1,2,3,4,5,1,2,3,4,5', DataY='1,2,3,4,2,3,4,5',
                               DataE='1,2,3,4,2,3,4,5', NSpec='2', UnitX='dSpacing', Distribution='1', YUnitLabel="S(q)")
     simpleapi.CreateWorkspace(OutputWorkspace='test2', DataX='1,2,3,4,5,1,2,3,4,5', DataY='1,2,3,4,2,3,4,5',
                               DataE='1,2,3,4,2,3,4,5', NSpec='2',
                               UnitX='Momentum', VerticalAxisUnit='TOF', VerticalAxisValues='1,2', Distribution='1',
                               YUnitLabel='E', WorkspaceTitle='x')
     simpleapi.GroupWorkspaces("test1,test2", OutputWorkspace="group")
     self.plotfile = os.path.join(config.getString('defaultsave.directory'), 'plot.png')
    def test_check_archive_logs(self):
        """Exercise _check_progress_log_run_completed against a synthetic
        archive-upload log file: no log present, new data logged, repeated
        queries with no new data, and a subsequent log update."""
        th = test_helper()

        propman = th.reducer.prop_man
        # define unique log file to use instead of real log file
        # for testing all branches of log validation routine
        test_dir = config.getString('defaultsave.directory')
        # os.path.join (not string concatenation) so the file name is not
        # fused onto the directory name when test_dir lacks a trailing slash
        test_log = os.path.normpath(os.path.join(test_dir, 'lastrun.txt'))
        # clear up rubbish from previous runs if any
        if os.path.isfile(test_log):
            os.remove(test_log)
        propman.archive_upload_log_file = test_log

        # no log file at all
        ok, run_num, info = th._check_progress_log_run_completed(10)
        self.assertTrue(ok)
        self.assertEqual(run_num, 0)
        self.assertEqual(info, 'log test disabled as no log file available')

        # Upload log have appeared:
        with open(test_log, 'w') as fh:
            fh.write('MAR 1000 0 \n')
        # need to set up the test log value again, as log test had been disabled automatically if no log file was found
        propman.archive_upload_log_file = test_log

        # log file states data available
        ok, run_num, info = th._check_progress_log_run_completed(10)
        self.assertTrue(ok)
        self.assertEqual(run_num, 1000)
        self.assertEqual(len(info), 0)

        # no changes for the second attempt to look at file
        ok, run_num, info = th._check_progress_log_run_completed(1000)
        self.assertTrue(ok)
        self.assertEqual(run_num, 1000)
        self.assertEqual(info, 'no new data have been added to archive')

        ok, run_num, info = th._check_progress_log_run_completed(1001)
        self.assertFalse(ok)
        self.assertEqual(run_num, 1000)
        self.assertEqual(info, 'no new data have been added to archive')

        with open(test_log, 'w') as fh:
            fh.write('MAR 1001 0 \n')
        m_time = os.path.getmtime(test_log)
        # Update modification time manually as some OS and some tests do not update it properly
        m_time = m_time + 1
        os.utime(test_log, (m_time, m_time))
        # next attempt is successful
        ok, run_num, info = th._check_progress_log_run_completed(1001)
        self.assertEqual(info, '')
        self.assertEqual(run_num, 1001)
        self.assertTrue(ok)

        os.remove(test_log)
Beispiel #11
0
class StringToPngTest(unittest.TestCase):
    """Check that the StringToPng algorithm writes a non-trivial PNG file."""

    # Output image produced by the test; removed again in tearDown().
    plotfile = os.path.join(config.getString('defaultsave.directory'),
                            "StringToPngTest.png")

    def tearDown(self):
        """Remove the generated plot file after each test run."""
        plot_path = self.plotfile
        if os.path.exists(plot_path):
            os.remove(plot_path)

    def testPlot(self):
        """Render a two-line string and verify the PNG exceeds 1 kB."""
        message = 'This is a string\nAnd this is a second line'
        simpleapi.StringToPng(String=message, OutputFilename=self.plotfile)
        self.assertGreater(os.path.getsize(self.plotfile), 1e3)
Beispiel #12
0
    def _compare_workspace(self, workspace, reference_file_name):
        """Compare *workspace* against the reference stored in
        *reference_file_name*; on mismatch, save the workspace to the default
        save directory for inspection and fail the test."""
        # Load the reference file
        load_name = "LoadNexusProcessed"
        load_options = {
            "Filename": reference_file_name,
            "OutputWorkspace": EMPTY_NAME
        }
        load_alg = create_unmanaged_algorithm(load_name, **load_options)
        load_alg.execute()
        reference_workspace = load_alg.getProperty("OutputWorkspace").value

        # Save the workspace out if the comparison fails (i.e. they are not equal)
        f_name = path.join(
            config.getString('defaultsave.directory'),
            MantidSystemTest.mismatchWorkspaceName(reference_file_name))

        # Prepared up-front; only executed below when the comparison fails.
        save_name = "SaveNexus"
        save_options = {"Filename": f_name, "InputWorkspace": workspace}
        save_alg = create_unmanaged_algorithm(save_name, **save_options)

        # Compare reference file with the output_workspace
        # We need to disable the instrument comparison, it takes way too long
        # We need to disable the sample -- Not clear why yet
        # operation how many entries can be found in the sample logs
        compare_name = "CompareWorkspaces"
        compare_options = {
            "Workspace1": workspace,
            "Workspace2": reference_workspace,
            "Tolerance": 1e-6,
            "CheckInstrument": False,
            "CheckSample": False,
            "ToleranceRelErr": True,
            "CheckAllData": True,
            "CheckMasking": True,
            "CheckType": True,
            "CheckAxes": True,
            "CheckSpectraMap": True
        }
        compare_alg = create_unmanaged_algorithm(compare_name,
                                                 **compare_options)
        # run unmanaged but not as a child so the comparison behaves like a
        # top-level algorithm
        compare_alg.setChild(False)
        compare_alg.execute()
        result = compare_alg.getProperty("Result").value

        # Keep the mismatching workspace on disk for post-mortem analysis.
        if not result:
            save_alg.execute()

        self.assertTrue(result)
Beispiel #13
0
    def _group_data(self, workspace):
        """Group detector spectra in *workspace* according to the configured
        grouping policy: 'Individual' (or None) leaves it untouched, 'All'
        averages every unmasked spectrum, anything else is resolved first as
        an ADS workspace name and then as a grouping map file. Returns the
        workspace name."""
        grouping = self._grouping_policy
        if ( grouping == 'Individual' ) or ( grouping is None ):
            return workspace
        elif ( grouping == 'All' ):
            # Average every workspace index that is not masked into one group.
            nhist = mtd[workspace].getNumberHistograms()
            wslist = []
            for i in range(0, nhist):
                if i not in self._masking_detectors:
                    wslist.append(i)
            GroupDetectors(InputWorkspace=workspace, OutputWorkspace=workspace, 
                           WorkspaceIndexList=wslist, Behaviour='Average')
        else:
            # We may have either a workspace name or a mapping file name here
            grouping_workspace = None
            grouping_filename = None

            # See if it a workspace in ADS
            # If not assume it is a mapping file
            try:
                grouping_workspace = mtd[grouping]
            except KeyError:
                logger.notice("Cannot find group workspace " + grouping + ", attempting to find as file")

                # See if it is an absolute path
                # Otherwise check in the default group files directory
                if (os.path.isfile(grouping)):
                    grouping_filename = grouping
                else:
                    grouping_filename = os.path.join(config.getString('groupingFiles.directory'), grouping)

            # Mask detectors before grouping if we need to
            if len(self._masking_detectors) > 0:
                MaskDetectors(workspace, WorkspaceIndexList=self._masking_detectors)

            # Run GroupDetectors with a workspace if we have one
            # Otherwise try to run it with a mapping file
            # (exactly one of grouping_workspace/grouping_filename is set here)
            if grouping_workspace is not None:
                GroupDetectors(InputWorkspace=workspace, OutputWorkspace=workspace, CopyGroupingFromWorkspace=grouping_workspace, 
                        Behaviour='Average')
            elif os.path.isfile(grouping_filename):
                # NOTE(review): a missing map file silently skips grouping here
                GroupDetectors(InputWorkspace=workspace, OutputWorkspace=workspace, MapFile=grouping_filename, 
                        Behaviour='Average')

        return workspace
    def _group_fixed(self, workspace):
        """Group *workspace* spectra using the fixed grouping defined by the
        instrument parameter 'Workflow.FixedGrouping'.

        The parameter is a comma-separated list of 'start-end' detector index
        ranges. Masked detectors are dropped from their groups, a grouping
        XML map file is written to the default save directory and applied
        with GroupDetectors.

        @param workspace Name of the workspace to group (modified in place)
        @return The workspace name
        @raise AttributeError If the instrument defines no fixed grouping
        """
        try:
            grps = mtd[workspace].getInstrument().getStringParameter("Workflow.FixedGrouping")[0]
        except IndexError:
            raise AttributeError("Could not retrieve fixed grouping setting " "from the instrument parameter file.")

        # Expand each 'start-end' range into an explicit, mutable list of
        # indices (list() is required: a bare range has no .remove()).
        groups = grps.split(",")
        group_list = []
        for group in groups:
            group_to_from = group.split("-")
            group_vals = list(range(int(group_to_from[0]), int(group_to_from[1]) + 1))
            group_list.append(group_vals)

        # Remove masked detectors from any group they appear in.
        for i in self._masking_detectors:
            for grp in group_list:
                try:
                    grp.remove(i)
                except ValueError:
                    pass

        # Build the detector-grouping XML map; detector ids are 1-based.
        xml = '<?xml version="1.0" encoding="UTF-8" ?>\n'
        xml += "<detector-grouping>\n"
        for grp in group_list:
            xml += '<group name="group">\n'
            xml += '    <ids val="' + ",".join(str(i + 1) for i in grp) + '"/>\n'
            xml += "</group>\n"
        xml += "</detector-grouping>\n"

        xfile = os.path.join(config.getString("defaultsave.directory"), "fixedGrp.xml")
        # Context manager guarantees the handle is closed (the original also
        # shadowed the 'file' builtin).
        with open(xfile, "w") as grp_file:
            grp_file.write(xml)
        GroupDetectors(InputWorkspace=workspace, OutputWorkspace=workspace, MapFile=xfile, Behaviour="Average")
        return workspace
Beispiel #15
0
"""
Script to perform absolute units data reduction for MAPS
"""
from qtiGenie import *
from mantid.simpleapi import *
from mantid import config

import time

#instrument name:
inst='map'
iliad_setup(inst)
ext='.raw'

# where to save resutls (usually specified in Mantid, data search directories)
save_dir = config.getString('defaultsave.directory')
if len(save_dir) ==0 :
    config['defaultsave.directory']=os.getcwd()
    save_dir = config.getString('defaultsave.directory')
    
print "Data will be saved into: ",save_dir
# map mask and cal file, again the values from Mantid, data search directories can be modified here
config.appendDataSearchDir('/home/maps/mprogs/InstrumentFiles/maps') 
# data (raw or nxs) run files -- values from data search directories can be modified here
config.appendDataSearchDir('/isisdatar55/NDXMAPS/Instrument/data/cycle_12_3') 

maskfile='4to1_022.msk' #'testMask2.msk'#hard mask out the edges of detectors, which tend to be noisy

#map file
mapfile='4to1' #single crystal mapping file
#mapfile='/opt/Mantid/instrument/mapfiles/maps/parker_rings' #powder mapping file
Beispiel #16
0
def group_spectra(workspace_name, masked_detectors, method, group_file=None, group_ws=None):
    """
    Groups spectra in a given workspace according to the Workflow.GroupingMethod and
    Workflow.GroupingFile parameters and GroupingPolicy property.

    @param workspace_name Name of workspace to group spectra of
    @param masked_detectors List of spectra numbers to mask
    @param method Grouping method (IPF, All, Individual, File, Workspace)
    @param group_file File for File method
    @param group_ws Workspace for Workspace method
    """
    from mantid.simpleapi import (MaskDetectors, GroupDetectors)

    instrument = mtd[workspace_name].getInstrument()

    # If grouping as per the IPF is desired
    if method == 'IPF':
        # Get the grouping method from the parameter file
        try:
            grouping_method = instrument.getStringParameter('Workflow.GroupingMethod')[0]
        except IndexError:
            # Parameter absent: fall back to no grouping
            grouping_method = 'Individual'

    else:
        # Otherwise use the value of GroupingPolicy
        grouping_method = method

    logger.information('Grouping method for workspace %s is %s' % (workspace_name, grouping_method))

    if grouping_method == 'Individual':
        # Nothing to do here
        return

    elif grouping_method == 'All':
        # Get a list of all spectra minus those which are masked
        num_spec = mtd[workspace_name].getNumberHistograms()
        spectra_list = [spec for spec in range(0, num_spec) if spec not in masked_detectors]

        # Apply the grouping
        GroupDetectors(InputWorkspace=workspace_name,
                       OutputWorkspace=workspace_name,
                       Behaviour='Average',
                       WorkspaceIndexList=spectra_list)

    elif grouping_method == 'File':
        # Get the filename for the grouping file
        if group_file is not None:
            grouping_file = group_file
        else:
            try:
                grouping_file = instrument.getStringParameter('Workflow.GroupingFile')[0]
            except IndexError:
                raise RuntimeError('Cannot get grouping file from properties or IPF.')

        # If the file is not found assume it is in the grouping files directory
        if not os.path.isfile(grouping_file):
            grouping_file = os.path.join(config.getString('groupingFiles.directory'), grouping_file)

        # If it is still not found just give up
        if not os.path.isfile(grouping_file):
            raise RuntimeError('Cannot find grouping file: %s' % (grouping_file))

        # Mask detectors if required
        if len(masked_detectors) > 0:
            MaskDetectors(Workspace=workspace_name,
                          WorkspaceIndexList=masked_detectors)

        # Apply the grouping
        GroupDetectors(InputWorkspace=workspace_name,
                       OutputWorkspace=workspace_name,
                       Behaviour='Average',
                       MapFile=grouping_file)

    elif grouping_method == 'Workspace':
        # Apply the grouping
        GroupDetectors(InputWorkspace=workspace_name,
                       OutputWorkspace=workspace_name,
                       Behaviour='Average',
                       CopyGroupingFromWorkspace=group_ws)

    else:
        raise RuntimeError('Invalid grouping method %s for workspace %s' % (grouping_method, workspace_name))
Beispiel #17
0
def group_spectra_of(workspace,
                     masked_detectors,
                     method,
                     group_file=None,
                     group_ws=None,
                     group_string=None):
    """
    Groups spectra in a given workspace according to the Workflow.GroupingMethod and
    Workflow.GroupingFile parameters and GroupingPolicy property.

    Runs GroupDetectors as an unmanaged child algorithm and returns the
    grouped workspace (or None for 'Individual' grouping).

    @param workspace Workspace to group spectra of
    @param masked_detectors List of spectra numbers to mask
    @param method Grouping method (IPF, All, Individual, File, Workspace)
    @param group_file File for File method
    @param group_ws Workspace for Workspace method
    @param group_string String for custom method - comma separated list or range
    """
    instrument = workspace.getInstrument()
    # Common GroupDetectors setup; the branches below only add the
    # method-specific property before execution.
    group_detectors = AlgorithmManager.create("GroupDetectors")
    group_detectors.setChild(True)
    group_detectors.setProperty("InputWorkspace", workspace)
    group_detectors.setProperty("Behaviour", 'Average')

    # If grouping as per the IPF is desired
    if method == 'IPF':
        # Get the grouping method from the parameter file
        try:
            grouping_method = instrument.getStringParameter(
                'Workflow.GroupingMethod')[0]
        except IndexError:
            # Parameter absent: fall back to no grouping
            grouping_method = 'Individual'

    else:
        # Otherwise use the value of GroupingPolicy
        grouping_method = method

    logger.information('Grouping method for workspace %s is %s' %
                       (workspace.name(), grouping_method))

    if grouping_method == 'Individual':
        # Nothing to do here
        return None

    elif grouping_method == 'All':
        # Get a list of all spectra minus those which are masked
        num_spec = workspace.getNumberHistograms()
        spectra_list = [
            spec for spec in range(0, num_spec) if spec not in masked_detectors
        ]

        # Apply the grouping
        group_detectors.setProperty("WorkspaceIndexList", spectra_list)

    elif grouping_method == 'File':
        # Get the filename for the grouping file
        if group_file is not None:
            grouping_file = group_file
            # NOTE(review): group 0 is excluded only for explicitly supplied
            # files, not for IPF-defined ones -- confirm this asymmetry is
            # intentional
            group_detectors.setProperty("ExcludeGroupNumbers", [0])
        else:
            try:
                grouping_file = instrument.getStringParameter(
                    'Workflow.GroupingFile')[0]
            except IndexError:
                raise RuntimeError(
                    'Cannot get grouping file from properties or IPF.')

        # If the file is not found assume it is in the grouping files directory
        if not os.path.isfile(grouping_file):
            grouping_file = os.path.join(
                config.getString('groupingFiles.directory'), grouping_file)

        # If it is still not found just give up
        if not os.path.isfile(grouping_file):
            raise RuntimeError('Cannot find grouping file: %s' % grouping_file)

        # Mask detectors if required
        if len(masked_detectors) > 0:
            _mask_detectors(workspace, masked_detectors)

        # Apply the grouping
        group_detectors.setProperty("MapFile", grouping_file)

    elif grouping_method == 'Workspace':
        # Apply the grouping
        group_detectors.setProperty("CopyGroupingFromWorkspace", group_ws)

    elif grouping_method == 'Custom':
        # Mask detectors if required
        if len(masked_detectors) > 0:
            _mask_detectors(workspace, masked_detectors)
        # Custom grouping is delegated entirely to group_on_string
        return group_on_string(group_detectors, group_string)

    else:
        raise RuntimeError('Invalid grouping method %s for workspace %s' %
                           (grouping_method, workspace.name()))

    group_detectors.setProperty("OutputWorkspace", "__temp")
    group_detectors.execute()
    return group_detectors.getProperty("OutputWorkspace").value
def group_spectra(workspace_name, masked_detectors, method, group_file=None, group_ws=None):
    """
    Groups spectra in a given workspace according to the Workflow.GroupingMethod and
    Workflow.GroupingFile parameters and GroupingPolicy property.

    @param workspace_name Name of workspace to group spectra of
    @param masked_detectors List of spectra numbers to mask
    @param method Grouping method (IPF, All, Individual, File, Workspace)
    @param group_file File for File method
    @param group_ws Workspace for Workspace method
    """
    from mantid.simpleapi import (MaskDetectors, GroupDetectors)

    instrument = mtd[workspace_name].getInstrument()

    if method == 'IPF':
        # Grouping as per the IPF: read the method from the parameter file,
        # falling back to 'Individual' when the parameter is absent.
        try:
            grouping_method = instrument.getStringParameter('Workflow.GroupingMethod')[0]
        except IndexError:
            grouping_method = 'Individual'
    else:
        # Otherwise the caller's GroupingPolicy value is used directly.
        grouping_method = method

    logger.information('Grouping method for workspace %s is %s' % (workspace_name, grouping_method))

    if grouping_method == 'Individual':
        # Each spectrum stays on its own -- nothing to group.
        return

    if grouping_method == 'All':
        # Average every spectrum that is not masked into a single group.
        total = mtd[workspace_name].getNumberHistograms()
        unmasked = [idx for idx in range(total) if idx not in masked_detectors]
        GroupDetectors(InputWorkspace=workspace_name,
                       OutputWorkspace=workspace_name,
                       Behaviour='Average',
                       WorkspaceIndexList=unmasked)
        return

    if grouping_method == 'File':
        # Resolve the grouping file: explicit argument first, then the IPF.
        if group_file is not None:
            grouping_file = group_file
        else:
            try:
                grouping_file = instrument.getStringParameter('Workflow.GroupingFile')[0]
            except IndexError:
                raise RuntimeError('Cannot get grouping file from properties or IPF.')

        # Fall back to the grouping-files directory, then give up.
        if not os.path.isfile(grouping_file):
            grouping_file = os.path.join(config.getString('groupingFiles.directory'), grouping_file)
        if not os.path.isfile(grouping_file):
            raise RuntimeError('Cannot find grouping file: %s' % grouping_file)

        if len(masked_detectors) > 0:
            MaskDetectors(Workspace=workspace_name,
                          WorkspaceIndexList=masked_detectors)

        GroupDetectors(InputWorkspace=workspace_name,
                       OutputWorkspace=workspace_name,
                       Behaviour='Average',
                       MapFile=grouping_file)
        return

    if grouping_method == 'Workspace':
        # Copy the grouping layout from an existing workspace.
        GroupDetectors(InputWorkspace=workspace_name,
                       OutputWorkspace=workspace_name,
                       Behaviour='Average',
                       CopyGroupingFromWorkspace=group_ws)
        return

    raise RuntimeError('Invalid grouping method %s for workspace %s' % (grouping_method, workspace_name))
Beispiel #19
0
def export_masks(ws,fileName='',returnMasksOnly=False):
    """Exports masks applied to Mantid workspace
       (e.g. drawn using the instrument view) and write these masks
       into the old fashioned ASCII .msk file containing masked spectra numbers.

       The file is Libisis/Mantid old ISIS format compatible and can be read by Libisis
       or Mantid LoadMasks algorithm

       If optional parameter fileName is present, the masks are saved
       in the file with this name
       Otherwise, the file with the name equal to the workspace
       name and the extension .msk is used.

       If returnMasks is set to True, the function does not write to file but returns
       list of masks instead.
    """
    # get pointer to the workspace (accept either a name or a workspace object)
    if isinstance(ws, str):
        pws = mtd[ws]
    else:
        pws = ws

    ws_name=pws.getName()
    nhist = pws.getNumberHistograms()

    no_detectors = 0
    masks = []
    for i in range(nhist):
        # set provisional spectra ID
        ms = i+1
        try:
            sp = pws.getSpectrum(i)
            # got real spectra ID, which would correspond real spectra num to spectra ID map
            ms = sp.getSpectrumNo()
#pylint: disable=W0703
        except Exception:
            # best-effort: a spectrum we cannot query is treated as masked
            logger.notice("Can not retrieve spectra No: " + str(i) + ". Have masked it")
            masks.append(ms)
            continue
        try:
            det = pws.getDetector(i)
#pylint: disable=W0703
        except Exception:
            # spectrum without a detector: count it and treat as masked
            no_detectors = no_detectors +1
            masks.append(ms)
            continue
        if det.isMasked():
            masks.append(ms)

    # default output file: <workspace name>.msk in the default save directory
    filename=''
    if len(fileName)==0 :
        filename = os.path.join(config.getString('defaultsave.directory'),ws_name+'.msk')
    else:
        filename = fileName

    nMasks = len(masks)
    if nMasks == 0:
        # NOTE(review): the warning about the file fires when returnMasksOnly
        # is True, i.e. when no file was going to be written anyway -- the
        # condition looks inverted; confirm against the callers.
        if returnMasksOnly:
            logger.warning("Workspace {0} have no masked spectra. File {1} have not been created".format(ws_name,filename))
        else:
            logger.notice("Workspace "+ws_name+" have no masked spectra")
        return masks

    logger.notice("Workspace {0} has {1} masked spectra, including {2} spectra without detectors".format(ws_name,nMasks,no_detectors))

    # write at most 8 entries per line in the ISIS .msk format
    if not returnMasksOnly :
        writeISISmasks(filename,masks,8)
    return masks
"""
Script to perform absolute units data reduction for MARI
"""
from qtiGenie import *
from mantid.simpleapi import *
from mantid import config

import time

#instrument name:
# NOTE(review): inst is empty here — presumably iliad_setup falls back to a default
# instrument; confirm against qtiGenie.iliad_setup.
inst=''
iliad_setup(inst)
ext='.raw'

# where to save results (usually specified in Mantid, data search directories)
save_dir = config.getString('defaultsave.directory')
if len(save_dir) ==0 :
    # no default save directory configured: fall back to the current working directory
    config['defaultsave.directory']=os.getcwd()
    save_dir = config.getString('defaultsave.directory')
    
print "Data will be saved into: ",save_dir
# map mask and cal file, again the values from Mantid, data search directories can be modified here
config.appendDataSearchDir('/usr/local/mprogs/InstrumentFiles/mari') 
# data (raw or nxs) run files -- values from data search directories can be modified here
config.appendDataSearchDir('/isisdatar55/NDXMARI/Instrument/data/cycle_05_1') 
config.appendDataSearchDir(r'd:/Data/MantidSystemTests/Data') 


maskfile='mar11015.msk' #'testMask2.msk'#hard mask out the edges of detectors, which tend to be noisy

#map file
def group_spectra_of(workspace, masked_detectors, method, group_file=None, group_ws=None):
    """
    Group the spectra of a workspace, choosing the grouping scheme either from
    the Workflow.GroupingMethod / Workflow.GroupingFile instrument parameters
    or from the supplied GroupingPolicy value.

    @param workspace Workspace whose spectra are grouped
    @param masked_detectors List of spectra numbers to mask
    @param method Grouping method (IPF, All, Individual, File, Workspace)
    @param group_file Map file used by the File method
    @param group_ws Grouping workspace used by the Workspace method
    @return The grouped workspace, or None when grouping is Individual
    """
    instrument = workspace.getInstrument()

    # Child GroupDetectors algorithm configured up front; the branches below
    # only decide which grouping property to set on it.
    grouper = AlgorithmManager.create("GroupDetectors")
    grouper.setChild(True)
    grouper.setProperty("InputWorkspace", workspace)
    grouper.setProperty("Behaviour", 'Average')

    # Resolve the effective policy: 'IPF' defers to the parameter file,
    # anything else is taken verbatim.
    if method == 'IPF':
        try:
            policy = instrument.getStringParameter('Workflow.GroupingMethod')[0]
        except IndexError:
            policy = 'Individual'
    else:
        policy = method

    logger.information('Grouping method for workspace %s is %s' % (workspace.getName(), policy))

    if policy == 'Individual':
        # Leave every spectrum ungrouped.
        return None

    if policy == 'All':
        # Group every spectrum that is not masked into a single group.
        total = workspace.getNumberHistograms()
        kept_indices = [idx for idx in range(0, total) if idx not in masked_detectors]
        grouper.setProperty("WorkspaceIndexList", kept_indices)
    elif policy == 'File':
        # Prefer an explicitly supplied map file, otherwise ask the IPF.
        if group_file is not None:
            map_file = group_file
        else:
            try:
                map_file = instrument.getStringParameter('Workflow.GroupingFile')[0]
            except IndexError:
                raise RuntimeError('Cannot get grouping file from properties or IPF.')

        # Not found as given: look in the standard grouping files directory.
        if not os.path.isfile(map_file):
            map_file = os.path.join(config.getString('groupingFiles.directory'), map_file)

        # Still missing: nothing more to try.
        if not os.path.isfile(map_file):
            raise RuntimeError('Cannot find grouping file: %s' % map_file)

        # Mask detectors if required
        if len(masked_detectors) > 0:
            _mask_detectors(workspace, masked_detectors)

        grouper.setProperty("MapFile", map_file)
    elif policy == 'Workspace':
        # Copy the grouping defined on another workspace.
        grouper.setProperty("CopyGroupingFromWorkspace", group_ws)
    else:
        raise RuntimeError('Invalid grouping method %s for workspace %s' % (policy, workspace.getName()))

    grouper.setProperty("OutputWorkspace", "__temp")
    grouper.execute()
    return grouper.getProperty("OutputWorkspace").value
    def execute(self, reducer, file_ws):
        """Loads the data.
        """
        wsname = ""

        for file in self._data_files:
            logger.notice("Loading file %s" % file)

            loaded_ws = Load(Filename=self._data_files[file], OutputWorkspace=file, LoadLogFiles=False)
            loader_handle = loaded_ws.getHistory().lastAlgorithm()
            loader_name = loader_handle.getPropertyValue("LoaderName")

            if mtd[file].getInstrument().getName() == "BASIS":
                ModeratorTzero(InputWorkspace=file, OutputWorkspace=file)
                basis_mask = mtd[file].getInstrument().getStringParameter("Workflow.MaskFile")[0]
                # Quick hack for older BASIS files that only have one side
                # if (mtd[file].getRun()['run_number'] < 16693):
                #        basis_mask = "BASIS_Mask_before_16693.xml"
                basis_mask_filename = os.path.join(config.getString("maskFiles.directory"), basis_mask)
                if os.path.isfile(basis_mask_filename):
                    LoadMask(Instrument="BASIS", OutputWorkspace="__basis_mask", InputFile=basis_mask_filename)
                    MaskDetectors(Workspace=file, MaskedWorkspace="__basis_mask")
                else:
                    logger.notice("Couldn't find specified mask file : " + str(basis_mask_filename))

            if self._parameter_file != None:
                LoadParameterFile(Workspace=file, Filename=self._parameter_file)

            if wsname == "":
                wsname = file

            if self._require_chop_data(file):
                ChopData(
                    InputWorkspace=file,
                    OutputWorkspace=file,
                    Step=20000.0,
                    NChops=5,
                    IntegrationRangeLower=5000.0,
                    IntegrationRangeUpper=10000.0,
                    MonitorWorkspaceIndex=self._monitor_index,
                )
                self._multiple_frames = True
            else:
                self._multiple_frames = False

            if self._multiple_frames:
                workspaces = mtd[file].getNames()
            else:
                workspaces = [file]

            logger.debug("self._monitor_index = " + str(self._monitor_index))

            for ws in workspaces:
                if loader_name.endswith("Nexus"):
                    LoadNexusMonitors(Filename=self._data_files[file], OutputWorkspace=ws + "_mon")
                else:
                    ## Extract Monitor Spectrum
                    ExtractSingleSpectrum(
                        InputWorkspace=ws, OutputWorkspace=ws + "_mon", WorkspaceIndex=self._monitor_index
                    )
                    ## Crop the workspace to remove uninteresting detectors
                    CropWorkspace(
                        InputWorkspace=ws,
                        OutputWorkspace=ws,
                        StartWorkspaceIndex=self._detector_range_start,
                        EndWorkspaceIndex=self._detector_range_end,
                    )

            try:
                msk = mtd[workspaces[0]].getInstrument().getStringParameter("Workflow.Masking")[0]
            except IndexError:
                msk = "None"
            if msk == "IdentifyNoisyDetectors":
                self._identify_bad_detectors(workspaces[0])

        if (self._sum) and (len(self._data_files) > 1):
            ## Sum files
            merges = []
            if self._multiple_frames:
                self._sum_chopped(wsname)
            else:
                self._sum_regular(wsname)
            ## Need to adjust the reducer's list of workspaces
            self._data_files = {}
            self._data_files[wsname] = wsname