Esempio n. 1
0
def convertToHKL(ws,
                 OutputWorkspace='__md_hkl',
                 norm=None,
                 UB=None,
                 Extents=(-10, 10, -10, 10, -10, 10),
                 Bins=(101, 101, 101),
                 Append=False):
    """Output MDEventWorkspace in HKL.

    :param ws: input workspace to convert
    :param OutputWorkspace: name for the binned HKL output workspace
    :param norm: optional normalization workspace, converted the same way
        into ``<OutputWorkspace>_norm``
    :param UB: UB matrix applied to the workspace(s) before conversion
    :param Extents: min/max pairs for the three HKL axes (6 values)
    :param Bins: number of bins along each of the three axes (3 values)
    :param Append: if True and the output already exists, accumulate into
        it instead of overwriting
    :return: the output workspace name
    """
    # NOTE: defaults changed from lists to tuples to avoid the shared
    # mutable-default-argument pitfall; only indexing is used, so callers
    # are unaffected.
    SetUB(ws, UB=UB)
    ConvertToMD(ws,
                QDimensions='Q3D',
                QConversionScales='HKL',
                dEAnalysisMode='Elastic',
                Q3DFrames='HKL',
                OutputWorkspace='__temp')

    # One "name,min,max,nbins" aligned-dimension descriptor per axis.
    AlignedDim0 = "{},{},{},{}".format(mtd['__temp'].getDimension(0).name,
                                       Extents[0], Extents[1], int(Bins[0]))
    AlignedDim1 = "{},{},{},{}".format(mtd['__temp'].getDimension(1).name,
                                       Extents[2], Extents[3], int(Bins[1]))
    AlignedDim2 = "{},{},{},{}".format(mtd['__temp'].getDimension(2).name,
                                       Extents[4], Extents[5], int(Bins[2]))

    # When appending, feed the existing output back in as the temporary
    # data workspace so the new events accumulate instead of replacing it.
    BinMD(InputWorkspace='__temp',
          TemporaryDataWorkspace=OutputWorkspace
          if Append and mtd.doesExist(OutputWorkspace) else None,
          OutputWorkspace=OutputWorkspace,
          AlignedDim0=AlignedDim0,
          AlignedDim1=AlignedDim1,
          AlignedDim2=AlignedDim2)
    DeleteWorkspace('__temp')
    if norm is not None:
        # Repeat the conversion for the normalization workspace, writing
        # to "<OutputWorkspace>_norm" with the same binning.
        SetUB(norm, UB=UB)
        ConvertToMD(norm,
                    QDimensions='Q3D',
                    QConversionScales='HKL',
                    dEAnalysisMode='Elastic',
                    Q3DFrames='HKL',
                    OutputWorkspace='__temp_norm')
        BinMD(InputWorkspace='__temp_norm',
              TemporaryDataWorkspace=str(OutputWorkspace) + '_norm' if Append
              and mtd.doesExist(str(OutputWorkspace) + '_norm') else None,
              OutputWorkspace=str(OutputWorkspace) + '_norm',
              AlignedDim0=AlignedDim0,
              AlignedDim1=AlignedDim1,
              AlignedDim2=AlignedDim2)
        DeleteWorkspace('__temp_norm')
    return OutputWorkspace
 def _extractMonitors(self, ws):
     """Extract monitor spectra from ws to another workspace.

     :param ws: input workspace containing detectors and (optionally) monitors
     :return: (detector workspace, monitor workspace or None)
     :raises RuntimeError: if no detector workspace was produced
     """
     detWSName = self._names.withSuffix('detectors')
     monWSName = self._names.withSuffix('monitors')
     ExtractMonitors(InputWorkspace=ws,
                     DetectorWorkspace=detWSName,
                     MonitorWorkspace=monWSName,
                     EnableLogging=self._subalgLogging)
     # BUGFIX: doesExist returns a bool, never None, so the original
     # "is None" comparison could never trigger the error.
     if not mtd.doesExist(detWSName):
         raise RuntimeError('No detectors in the input data.')
     detWS = mtd[detWSName]
     monWS = mtd[monWSName] if mtd.doesExist(monWSName) else None
     self._cleanup.cleanup(ws)
     return detWS, monWS
 def _extractMonitors(self, ws):
     """Extract monitor spectra from ws to another workspace.

     :param ws: input workspace containing detectors and (optionally) monitors
     :return: (detector workspace, monitor workspace or None)
     :raises RuntimeError: if no detector workspace was produced
     """
     detWSName = self._names.withSuffix('detectors')
     monWSName = self._names.withSuffix('monitors')
     ExtractMonitors(InputWorkspace=ws,
                     DetectorWorkspace=detWSName,
                     MonitorWorkspace=monWSName,
                     EnableLogging=self._subalgLogging)
     # BUGFIX: doesExist returns a bool, never None, so the original
     # "is None" comparison could never trigger the error.
     if not mtd.doesExist(detWSName):
         raise RuntimeError('No detectors in the input data.')
     detWS = mtd[detWSName]
     monWS = mtd[monWSName] if mtd.doesExist(monWSName) else None
     self._cleanup.cleanup(ws)
     return detWS, monWS
Esempio n. 4
0
    def _getMaskWSname(self):
        """Resolve the Masking property into a mask workspace name.

        Loads a mask file into the ADS when needed and returns the mask
        workspace name, or an empty string when no masking is selected.
        """
        masking = self.getProperty("Masking").value
        maskWSname = None
        maskFile = None

        # none and workspace are special
        if masking == 'None':
            pass
        elif masking == "Masking Workspace":
            maskWSname = str(self.getProperty("MaskingWorkspace").value)

        # deal with files
        elif masking == 'Custom - xml masking file':
            maskWSname = 'CustomMask'
            maskFile = self.getProperty('MaskingFilename').value
        # TODO not reading the correct mask file geometry
        elif masking == 'Horizontal' or masking == 'Vertical':
            maskWSname = masking + 'Mask'  # append the word 'Mask' for the wksp name
            if not mtd.doesExist(maskWSname):  # only load if it isn't already loaded
                maskFile = '/SNS/SNAP/shared/libs/%s_Mask.xml' % masking

        if maskFile is not None:
            LoadMask(InputFile=maskFile, Instrument='SNAP', OutputWorkspace=maskWSname)

        # Normalize "no mask" to an empty string for downstream callers.
        if maskWSname is None:
            maskWSname = ''
        return maskWSname
def add_directory_structure(dirs):
    """
    create the nested WorkspaceGroup structure in the ADS specified by the
    stored directory attribute.
    dirs = ["dir1", "dir2"] eg. ['Muon Data', 'MUSR72105', 'MUSR72105 Raw Data']
    """
    if not dirs:
        return
    if len(dirs) > len(set(dirs)):
        raise ValueError("Group names must be unique")

    for directory in dirs:
        if not mtd.doesExist(directory):
            workspace_group = api.WorkspaceGroup()
            mtd.addOrReplace(directory, workspace_group)
        elif not isinstance(mtd[directory], api.WorkspaceGroup):
            # A non-group workspace occupies the name: replace it with a group.
            mtd.remove(directory)
            workspace_group = api.WorkspaceGroup()
            mtd.addOrReplace(directory, workspace_group)
        else:
            # exists and is a workspace group
            pass

    # Create the nested group structure in the ADS
    previous_dir = ""
    for i, directory in enumerate(dirs):
        if i == 0:
            previous_dir = directory
            continue
        # BUGFIX: guard against re-adding a child that is already in the
        # group — an unconditional add duplicates the entry.
        if not mtd[previous_dir].__contains__(directory):
            mtd[previous_dir].add(directory)
        previous_dir = directory
 def _waterCalibration(self, ws):
     """Divide ws by a (water) reference workspace.

     Returns ws unchanged when no water reference property is set.
     :raises RuntimeError: if the reference data contains no detectors
     """
     if self.getProperty(Prop.WATER_REFERENCE).isDefault:
         return ws
     waterWS = self.getProperty(Prop.WATER_REFERENCE).value
     detWSName = self._names.withSuffix('water_detectors')
     waterWS = ExtractMonitors(InputWorkspace=waterWS,
                               DetectorWorkspace=detWSName,
                               EnableLogging=self._subalgLogging)
     # BUGFIX: doesExist returns a bool, never None — the original
     # "is None" test could never raise.
     if not mtd.doesExist(detWSName):
         raise RuntimeError('No detectors in the water reference data.')
     if waterWS.getNumberHistograms() != ws.getNumberHistograms():
         self.log().error(
             'Water workspace and run do not have the same number of histograms.'
         )
     rebinnedWaterWSName = self._names.withSuffix('water_rebinned')
     rebinnedWaterWS = RebinToWorkspace(WorkspaceToRebin=waterWS,
                                        WorkspaceToMatch=ws,
                                        OutputWorkspace=rebinnedWaterWSName,
                                        EnableLogging=self._subalgLogging)
     calibratedWSName = self._names.withSuffix('water_calibrated')
     calibratedWS = Divide(LHSWorkspace=ws,
                           RHSWorkspace=rebinnedWaterWS,
                           OutputWorkspace=calibratedWSName,
                           EnableLogging=self._subalgLogging)
     self._cleanup.cleanup(waterWS)
     self._cleanup.cleanup(rebinnedWaterWS)
     self._cleanup.cleanup(ws)
     return calibratedWS
Esempio n. 7
0
def save_mantid_nexus(workspace_name, file_name, title=''):
    """Save a workspace to a Mantid-importable NeXus file.

    :param workspace_name: name of the workspace in the ADS
    :param file_name: destination file path (must be writable)
    :param title: optional title stored in the NeXus file
    :raises RuntimeError: if the workspace is not in the ADS
    """
    # Validate arguments up front.
    checkdatatypes.check_file_name(file_name,
                                   check_exist=False,
                                   check_writable=True,
                                   is_dir=False)
    checkdatatypes.check_string_variable('Workspace title', title)
    checkdatatypes.check_string_variable('Workspace name', workspace_name)

    # Guard clause: fail early when the workspace is missing.
    if not mtd.doesExist(workspace_name):
        raise RuntimeError(
            'Workspace {0} does not exist in Analysis data service. Available '
            'workspaces are {1}.'
            ''.format(workspace_name, mtd.getObjectNames()))

    SaveNexusProcessed(InputWorkspace=workspace_name,
                       Filename=file_name,
                       Title=title)
Esempio n. 8
0
    def live_monitor(self, input_ws, output_ws):
        """Log the running event count and ensure the history table exists.

        :param input_ws: live event workspace being monitored
        :param output_ws: name of the "Event Rate History" table workspace,
            created here if it is not yet in the ADS
        """
        # Now get the data, read the first spectra
        spectra = input_ws.readY(0)
        # extract the first value from the array
        count = spectra[0]
        print(f'Total count: {count}')

        # NOTE(review): this deliberately overwrites the spectrum count above
        # with the total number of events — confirm the first print is wanted.
        count = input_ws.getNumberEvents()

        # output it as a log message
        logger.notice("Total counts so far " + str(count))

        # if my output workspace has not been created yet, create it.
        if not mtd.doesExist(output_ws):
            table = CreateEmptyTableWorkspace(OutputWorkspace=output_ws)
            table.setTitle("Event Rate History")
            table.addColumn("str", "Time")
            table.addColumn('str', 'EventsS')
            table.addColumn("int", "Events")

        # Re-fetch from the ADS; assert is a sanity check only (stripped
        # under python -O), not real validation.
        table = mtd[output_ws]
        assert table
    def prepare_background(input_md, reference_sample_mde, background_md: str):
        """Prepare background MDEventWorkspace from reduced sample Matrix
        This is the previous solution to merge all the ExperimentInfo

        :param input_md: reduced background data to clone and convert
        :param reference_sample_mde: name of the sample MDEventWorkspace whose
            goniometer settings are replicated onto the background
        :param background_md: name of the accumulated background output
        """
        dgs_data = CloneWorkspace(input_md)
        data_MDE = mtd[reference_sample_mde]
        # Start from a clean slate so stale accumulations are not reused.
        if mtd.doesExist('background_MDE'):
            DeleteWorkspace('background_MDE')

        # For every experiment-info entry of the sample, stamp its goniometer
        # angles onto the cloned background and accumulate the conversion
        # (OverwriteExisting=False appends into background_md each pass).
        for i in range(data_MDE.getNumExperimentInfo()):
            phi, chi, omega = data_MDE.getExperimentInfo(
                i).run().getGoniometer().getEulerAngles('YZY')
            AddSampleLogMultiple(Workspace=dgs_data,
                                 LogNames='phi, chi, omega',
                                 LogValues='{},{},{}'.format(phi, chi, omega))
            SetGoniometer(Workspace=dgs_data, Goniometers='Universal')
            ConvertToMD(InputWorkspace=dgs_data,
                        QDimensions='Q3D',
                        dEAnalysisMode='Direct',
                        Q3DFrames="Q_sample",
                        MinValues='-11,-11,-11,-25',
                        MaxValues='11,11,11,49',
                        PreprocDetectorsWS='-',
                        OverwriteExisting=False,
                        OutputWorkspace=background_md)
 def _waterCalibration(self, ws):
     """Divide ws by a (water) reference workspace.

     Returns ws unchanged when no water reference property is set.
     :raises RuntimeError: if the reference data contains no detectors
     """
     if self.getProperty(Prop.WATER_REFERENCE).isDefault:
         return ws
     waterWS = self.getProperty(Prop.WATER_REFERENCE).value
     detWSName = self._names.withSuffix('water_detectors')
     waterWS = ExtractMonitors(
         InputWorkspace=waterWS,
         DetectorWorkspace=detWSName,
         EnableLogging=self._subalgLogging
     )
     # BUGFIX: doesExist returns a bool, never None — the original
     # "is None" test could never raise.
     if not mtd.doesExist(detWSName):
         raise RuntimeError('No detectors in the water reference data.')
     if waterWS.getNumberHistograms() != ws.getNumberHistograms():
         self.log().error('Water workspace and run do not have the same number of histograms.')
     rebinnedWaterWSName = self._names.withSuffix('water_rebinned')
     rebinnedWaterWS = RebinToWorkspace(
         WorkspaceToRebin=waterWS,
         WorkspaceToMatch=ws,
         OutputWorkspace=rebinnedWaterWSName,
         EnableLogging=self._subalgLogging
     )
     calibratedWSName = self._names.withSuffix('water_calibrated')
     calibratedWS = Divide(
         LHSWorkspace=ws,
         RHSWorkspace=rebinnedWaterWS,
         OutputWorkspace=calibratedWSName,
         EnableLogging=self._subalgLogging
     )
     self._cleanup.cleanup(waterWS)
     self._cleanup.cleanup(rebinnedWaterWS)
     self._cleanup.cleanup(ws)
     return calibratedWS
Esempio n. 11
0
def convertQSampleToHKL(ws,
                        OutputWorkspace='__md_hkl',
                        norm=None,
                        UB=None,
                        Extents=(-10, 10, -10, 10, -10, 10),
                        Bins=(101, 101, 101),
                        Append=False):
    """Bin a Q_sample MDEventWorkspace into HKL using the given UB matrix.

    :param ws: input workspace in Q_sample
    :param OutputWorkspace: name for the binned HKL output
    :param norm: optional normalization workspace, converted via
        convertToHKL into ``<OutputWorkspace>_norm``
    :param UB: UB matrix defining the HKL basis vectors
    :param Extents: min/max pairs for the three output axes (6 values)
    :param Bins: number of bins per output axis (3 values)
    :param Append: accumulate into an existing output instead of replacing
    :return: the output workspace name
    """
    # NOTE: defaults changed from lists to tuples to avoid the shared
    # mutable-default-argument pitfall; they are only read, never mutated.
    ol = OrientedLattice()
    ol.setUB(UB)
    # Basis vectors for H, K, L expressed in Q_sample coordinates.
    q1 = ol.qFromHKL([1, 0, 0])
    q2 = ol.qFromHKL([0, 1, 0])
    q3 = ol.qFromHKL([0, 0, 1])
    BinMD(InputWorkspace=ws,
          AxisAligned=False,
          NormalizeBasisVectors=False,
          BasisVector0='[H,0,0],A^-1,{},{},{}'.format(q1.X(), q1.Y(), q1.Z()),
          BasisVector1='[0,K,0],A^-1,{},{},{}'.format(q2.X(), q2.Y(), q2.Z()),
          BasisVector2='[0,0,L],A^-1,{},{},{}'.format(q3.X(), q3.Y(), q3.Z()),
          OutputExtents=Extents,
          OutputBins=Bins,
          TemporaryDataWorkspace=OutputWorkspace
          if Append and mtd.doesExist(OutputWorkspace) else None,
          OutputWorkspace=OutputWorkspace)
    if norm is not None:
        # Copy the sample's goniometer onto the normalization workspace so
        # both are converted with the same orientation.
        mtd[str(norm)].run().getGoniometer().setR(
            mtd[str(ws)].getExperimentInfo(0).run().getGoniometer().getR())
        convertToHKL(norm,
                     OutputWorkspace=str(OutputWorkspace) + '_norm',
                     UB=UB,
                     Extents=Extents,
                     Bins=Bins,
                     Append=Append)
    return OutputWorkspace
Esempio n. 12
0
    def _getMaskWSname(self):
        """Resolve the Masking property into a mask workspace name.

        Loads a mask file into the ADS when needed; returns the mask
        workspace name, or None when masking is 'None'.
        """
        masking = self.getProperty("Masking").value
        maskWSname = None
        maskFile = None

        # none and workspace are special
        if masking == 'None':
            pass
        elif masking == "Masking Workspace":
            maskWSname = str(self.getProperty("MaskingWorkspace").value)

        # deal with files
        elif masking == 'Custom - xml masking file':
            maskWSname = 'CustomMask'
            maskFile = self.getProperty('MaskingFilename').value
        elif masking == 'Horizontal' or masking == 'Vertical':
            maskWSname = masking + 'Mask'  # append the word 'Mask' for the wksp name
            if not mtd.doesExist(
                    maskWSname):  # only load if it isn't already loaded
                maskFile = '/SNS/SNAP/shared/libs/%s_Mask.xml' % masking

        if maskFile is not None:
            LoadMask(InputFile=maskFile,
                     Instrument='SNAP',
                     OutputWorkspace=maskWSname)

        return maskWSname
Esempio n. 13
0
    def validateInputs(self):
        """Validate algorithm properties; map property name -> problem text."""
        issues = {}

        input_ws = self.getProperty("InputWorkspace").value

        # The workspace must be in the QSample frame.
        frame = input_ws.getSpecialCoordinateSystem().name
        if frame != "QSample":
            issues["InputWorkspace"] = (
                "Input workspace expected to be in QSample, "
                "workspace is in '{}'".format(frame))

        ndims = input_ws.getNumDims()
        if ndims != 3:
            issues["InputWorkspace"] = (
                "Input workspace needs 3 dimensions, it has {} dimensions."
                .format(ndims))

        # Extents come as min/max pairs; bins as one count per dimension.
        extents = self.getProperty("Extents").value
        bins = self.getProperty("Bins").value

        if len(extents) != 2 * ndims:
            issues["Extents"] = (
                "Expected a min and max value for each "
                "dimension (got {}, expected {}).".format(len(extents), ndims * 2))

        if len(bins) != ndims:
            issues["Bins"] = "Expected a number of bins for each dimension."

        # Either a peaks workspace must exist in the ADS, or the input
        # workspace itself must carry a UB matrix.
        peak_ws = self.getProperty("PeaksWorkspace")
        if peak_ws.isDefault:
            has_ub = (input_ws.getNumExperimentInfo() > 0
                      and input_ws.getExperimentInfo(0).sample().hasOrientedLattice())
            if not has_ub:
                issues["InputWorkspace"] = "Could not find a UB matrix in this workspace."
        elif not mtd.doesExist(self.getPropertyValue("PeaksWorkspace")):
            issues["PeaksWorkspace"] = "Provided peaks workspace does not exist in the ADS."

        return issues
Esempio n. 14
0
def add_directory_structure(dirs):
    """Create the nested WorkspaceGroup structure in the ADS given by dirs.

    dirs = ["dir1", "dir2"] eg. ['Muon Data', 'MUSR72105', 'MUSR72105 Raw Data']
    """
    if not dirs:
        return
    if len(set(dirs)) != len(dirs):
        raise ValueError("Group names must be unique")

    # Ensure every name is bound to a WorkspaceGroup in the ADS.
    for name in dirs:
        present = mtd.doesExist(name)
        if present and isinstance(mtd[name], api.WorkspaceGroup):
            continue  # already a group — nothing to do
        if present:
            # The name is taken by a non-group workspace; displace it.
            mtd.remove(name)
        mtd.addOrReplace(name, api.WorkspaceGroup())

    # Nest each group inside its predecessor (consecutive pairs).
    for parent, child in zip(dirs, dirs[1:]):
        if not mtd[parent].__contains__(child):
            mtd[parent].add(child)
Esempio n. 15
0
def _get_difc_ws(wksp, instr_ws=None):
    """Resolve wksp (workspace name or calibration file path) to a DIFC workspace.

    :param wksp: workspace name in the ADS, or a diffraction-calibration
        file path (.h5/.hd5/.hdf/.cal); None returns None
    :param instr_ws: instrument workspace, required when wksp is a
        calibration TableWorkspace
    :return: SpecialWorkspace2D holding DIFC values, or None
    :raises RuntimeError: on unloadable files, unknown names, or a missing
        instrument workspace
    :raises TypeError: when the named workspace has an unsupported type
    """
    if wksp is None:
        return None
    # Check if given a workspace
    ws_str = str(wksp)
    difc_ws = None
    if not mtd.doesExist(ws_str):
        # Check if it was a file instead
        if ws_str.endswith((".h5", ".hd5", ".hdf", ".cal")):
            # BUGFIX: narrow the bare "except:" (which also swallowed
            # KeyboardInterrupt/SystemExit) and chain the original cause.
            try:
                LoadDiffCal(Filename=ws_str,
                            WorkspaceName="__cal_{}".format(ws_str))
                difc_ws = CalculateDIFC(
                    InputWorkspace="__cal_{}_group".format(ws_str),
                    CalibrationWorkspace="__cal_{}_cal".format(ws_str),
                    OutputWorkspace="__difc_{}".format(ws_str))
            except Exception as err:
                raise RuntimeError(
                    "Could not load calibration file {}".format(ws_str)) from err
        else:
            raise RuntimeError(
                "Could not find workspace {} in ADS and it was not a file".
                format(ws_str))
    else:
        # If workspace exists, check if it is a SpecialWorkspace2D (result from CalculateDIFC)
        if mtd[ws_str].id() == "SpecialWorkspace2D":
            difc_ws = mtd[ws_str]
        elif mtd[ws_str].id() == "TableWorkspace":
            if not mtd.doesExist(str(instr_ws)):
                raise RuntimeError(
                    "Expected instrument workspace instr_ws to use with calibration tables"
                )
            # Check if the workspace looks like a calibration workspace
            col_names = mtd[ws_str].getColumnNames()
            # Only need the first two columns for the CalculateDIFC algorithm to work
            if len(col_names) >= 2 and col_names[0] == "detid" and col_names[
                    1] == "difc":
                # Calculate DIFC on this workspace
                difc_ws = CalculateDIFC(
                    InputWorkspace=mtd[str(instr_ws)],
                    CalibrationWorkspace=mtd[ws_str],
                    OutputWorkspace="__difc_{}".format(ws_str))
        else:
            raise TypeError(
                "Wrong workspace type. Expects SpecialWorkspace2D, TableWorkspace, or a filename"
            )
    return difc_ws
Esempio n. 16
0
    def test_HKL_norm_and_KeepTemporary(self):
        """Check HKL conversion output, dimensions and temporary workspaces."""
        ConvertWANDSCDtoQTest_out = ConvertWANDSCDtoQ(
            'ConvertWANDSCDtoQTest_data',
            NormalisationWorkspace='ConvertWANDSCDtoQTest_norm',
            Frame='HKL',
            KeepTemporaryWorkspaces=True,
            BinningDim0='-8.08,8.08,101',
            BinningDim1='-8.08,8.08,101',
            BinningDim2='-8.08,8.08,101',
            Uproj='1,1,0',
            Vproj='1,-1,0',
            Wproj='0,0,1')

        self.assertTrue(ConvertWANDSCDtoQTest_out)
        self.assertTrue(mtd.doesExist('ConvertWANDSCDtoQTest_out'))
        self.assertTrue(mtd.doesExist('ConvertWANDSCDtoQTest_out_data'))
        self.assertTrue(
            mtd.doesExist('ConvertWANDSCDtoQTest_out_normalization'))

        s = ConvertWANDSCDtoQTest_out.getSignalArray()
        self.assertAlmostEqual(np.nanmax(s), 4.646855396509936)
        self.assertAlmostEqual(np.nanargmax(s), 443011)

        self.assertEqual(ConvertWANDSCDtoQTest_out.getNumDims(), 3)
        self.assertEqual(ConvertWANDSCDtoQTest_out.getNPoints(), 101**3)

        # FIX: assertAlmostEquals is a deprecated alias removed in
        # Python 3.12 — use assertAlmostEqual throughout.
        d0 = ConvertWANDSCDtoQTest_out.getDimension(0)
        self.assertEqual(d0.name, '[H,H,0]')
        self.assertEqual(d0.getNBins(), 101)
        self.assertAlmostEqual(d0.getMinimum(), -8.08, 5)
        self.assertAlmostEqual(d0.getMaximum(), 8.08, 5)

        d1 = ConvertWANDSCDtoQTest_out.getDimension(1)
        self.assertEqual(d1.name, '[H,-H,0]')
        self.assertEqual(d1.getNBins(), 101)
        self.assertAlmostEqual(d1.getMinimum(), -8.08, 5)
        self.assertAlmostEqual(d1.getMaximum(), 8.08, 5)

        d2 = ConvertWANDSCDtoQTest_out.getDimension(2)
        self.assertEqual(d2.name, '[0,0,L]')
        self.assertEqual(d2.getNBins(), 101)
        self.assertAlmostEqual(d2.getMinimum(), -8.08, 5)
        self.assertAlmostEqual(d2.getMaximum(), 8.08, 5)

        self.assertEqual(ConvertWANDSCDtoQTest_out.getNumExperimentInfo(), 1)

        ConvertWANDSCDtoQTest_out.delete()
Esempio n. 17
0
 def cleanup(self, nf):
   """Remove simulation and fit-result workspaces from the ADS.

   :param nf: number of 'sim{i}' workspaces to remove
   """
   for iif in range(nf):
     AnalysisDataService.remove('sim{0}'.format(iif))
   AnalysisDataService.remove('targetW')
   # Remove the fitting results, if present.
   # BUGFIX: the original iterated over the *characters* of one string,
   # so the three suffix names were never checked — iterate the words.
   for suffix in 'NormalisedCovarianceMatrix Parameters Workspace'.split():
     if mtd.doesExist('targetW_{0}'.format(suffix)):
       AnalysisDataService.remove('targetW_{0}'.format(suffix))
Esempio n. 18
0
 def cleanup(self, nf):
   """Remove simulation and fit-result workspaces from the ADS.

   :param nf: number of 'sim{i}' workspaces to remove
   """
   for iif in range(nf):
     AnalysisDataService.remove('sim{0}'.format(iif))
   AnalysisDataService.remove('targetW')
   # Remove the fitting results, if present.
   # BUGFIX: the original iterated over the *characters* of one string,
   # so the three suffix names were never checked — iterate the words.
   for suffix in ('NormalisedCovarianceMatrix', 'Parameters', 'Workspace'):
     if mtd.doesExist('targetW_{0}'.format(suffix)):
       AnalysisDataService.remove('targetW_{0}'.format(suffix))
Esempio n. 19
0
 def _exportWorkspace(self, propName, wkspName):
     """Expose wkspName as output property propName if it exists in the ADS."""
     # Guard clause: nothing to export when the name is empty or absent.
     if not wkspName or not mtd.doesExist(wkspName):
         return
     if not self.existsProperty(propName):
         self.declareProperty(WorkspaceProperty(propName,
                                                wkspName,
                                                Direction.Output))
     self.log().debug('Exporting workspace through property "{}"={}'.format(propName, wkspName))
     self.setProperty(propName, wkspName)
Esempio n. 20
0
 def _exportWorkspace(self, propName, wkspName):
     """Declare (if needed) and set an output workspace property.

     Does nothing when wkspName is empty or not present in the ADS.
     """
     exportable = bool(wkspName) and mtd.doesExist(wkspName)
     if exportable:
         if not self.existsProperty(propName):
             prop = WorkspaceProperty(propName, wkspName, Direction.Output)
             self.declareProperty(prop)
         message = 'Exporting workspace through property "{}"={}'.format(propName, wkspName)
         self.log().debug(message)
         self.setProperty(propName, wkspName)
Esempio n. 21
0
    def hide(self):
        """
        Remove the workspace from the ADS and store it in the class instance
        """
        if mtd.doesExist(self._workspace_name):
            self._workspace = mtd[self._workspace_name]
            mtd.remove(self._workspace_name)

        # NOTE(review): the flag is cleared even when the workspace was not
        # in the ADS — confirm this unconditional reset is intended.
        self._is_in_ads = False
Esempio n. 22
0
def workspace_exists(ws_name):
    """Return whether a workspace of the given name is present in the ADS.

    :param ws_name: workspace name to look up
    :return: True if the workspace exists, False otherwise
    """
    # Validate the argument type before querying the ADS.
    checkdatatypes.check_string_variable('Workspace name', ws_name)
    return mtd.doesExist(ws_name)
Esempio n. 23
0
 def workspace(self, value):
     """Replace the stored workspace, removing any ADS copy of the old one.

     :param value: a Mantid Workspace instance
     :raises AttributeError: if value is not a Workspace
     """
     # BUGFIX: validate *before* mutating state — the original removed the
     # existing workspace from the ADS and cleared the flag even when the
     # new value was invalid and the assignment then raised.
     if not isinstance(value, Workspace):
         raise AttributeError("Attempting to set object of type {}, must be"
                              " a Mantid Workspace type".format(type(value)))
     if not self.is_hidden:
         if mtd.doesExist(self._workspace_name):
             mtd.remove(self._workspace_name)
         self._is_in_ads = False
     self._workspace = value
 def _delete(self, ws):
     """Delete the workspace named by ws unless it is protected or deletion
     is turned off entirely.
     """
     if not self._doDelete:
         return
     try:
         name = str(ws)
     except RuntimeError:
         return
     # Skip protected names and anything already gone from the ADS.
     if name in self._protected or not mtd.doesExist(name):
         return
     DeleteWorkspace(Workspace=name, EnableLogging=self._deleteAlgorithmLogging)
 def _delete(self, ws):
     """Delete the given workspace when deletion is enabled and the name is
     neither protected nor missing from the ADS.
     """
     if self._doDelete:
         try:
             wsName = str(ws)
         except RuntimeError:
             # Name could not be resolved; nothing to delete.
             wsName = None
         if wsName is not None:
             deletable = wsName not in self._protected and mtd.doesExist(wsName)
             if deletable:
                 DeleteWorkspace(Workspace=wsName,
                                 EnableLogging=self._deleteAlgorithmLogging)
Esempio n. 26
0
 def hide(self):
     """
     Remove the workspace from the ADS and store it in the class instance
     """
     # Guard clause replaces the original's empty else branch.
     if not mtd.doesExist(self._workspace_name):
         return
     self._workspace = mtd[self._workspace_name]
     mtd.remove(self._workspace_name)
     self._is_in_ads = False
     self._workspace_name = ""
     self._directory_structure = ""
Esempio n. 27
0
    def _getMaskWSname(self):
        """Resolve the Masking property into a mask workspace name.

        Loads the relevant mask file when required; returns None when no
        masking option matches.
        """
        masking = self.getProperty("Masking").value
        maskWSname = None
        if masking == 'Custom - xml masking file':
            # Custom masks are always (re)loaded from the given file.
            maskWSname = 'CustomMask'
            LoadMask(InputFile=self.getProperty('MaskingFilename').value,
                     Instrument='SNAP', OutputWorkspace=maskWSname)
        elif masking in ('Horizontal', 'Vertical'):
            # Shared masks are loaded once and reused from the ADS.
            maskWSname = masking + 'Mask'
            if not mtd.doesExist(maskWSname):
                LoadMask(InputFile='/SNS/SNAP/shared/libs/%s_Mask.xml' % masking,
                         Instrument='SNAP', OutputWorkspace=maskWSname)
        elif masking == "Masking Workspace":
            maskWSname = str(self.getProperty("MaskingWorkspace").value)

        return maskWSname
    def test_HKL_norm_and_KeepTemporary(self):
        """Check HKL conversion output, dimensions and temporary workspaces."""
        ConvertWANDSCDtoQTest_out = ConvertWANDSCDtoQ('ConvertWANDSCDtoQTest_data',NormalisationWorkspace='ConvertWANDSCDtoQTest_norm',
                                                      Frame='HKL',KeepTemporaryWorkspaces=True,BinningDim0='-8.08,8.08,101',
                                                      BinningDim1='-8.08,8.08,101',BinningDim2='-8.08,8.08,101',
                                                      Uproj='1,1,0',Vproj='1,-1,0',Wproj='0,0,1')

        self.assertTrue(ConvertWANDSCDtoQTest_out)
        self.assertTrue(mtd.doesExist('ConvertWANDSCDtoQTest_out'))
        self.assertTrue(mtd.doesExist('ConvertWANDSCDtoQTest_out_data'))
        self.assertTrue(mtd.doesExist('ConvertWANDSCDtoQTest_out_normalization'))

        s = ConvertWANDSCDtoQTest_out.getSignalArray()
        self.assertAlmostEqual(np.nanmax(s), 4.646855396509936)
        self.assertAlmostEqual(np.nanargmax(s), 443011)

        # FIX: assertEquals / assertAlmostEquals are deprecated aliases
        # removed in Python 3.12 — use assertEqual / assertAlmostEqual.
        self.assertEqual(ConvertWANDSCDtoQTest_out.getNumDims(), 3)
        self.assertEqual(ConvertWANDSCDtoQTest_out.getNPoints(), 101**3)

        d0 = ConvertWANDSCDtoQTest_out.getDimension(0)
        self.assertEqual(d0.name, '[H,H,0]')
        self.assertEqual(d0.getNBins(), 101)
        self.assertAlmostEqual(d0.getMinimum(), -8.08, 5)
        self.assertAlmostEqual(d0.getMaximum(), 8.08, 5)

        d1 = ConvertWANDSCDtoQTest_out.getDimension(1)
        self.assertEqual(d1.name, '[H,-H,0]')
        self.assertEqual(d1.getNBins(), 101)
        self.assertAlmostEqual(d1.getMinimum(), -8.08, 5)
        self.assertAlmostEqual(d1.getMaximum(), 8.08, 5)

        d2 = ConvertWANDSCDtoQTest_out.getDimension(2)
        self.assertEqual(d2.name, '[0,0,L]')
        self.assertEqual(d2.getNBins(), 101)
        self.assertAlmostEqual(d2.getMinimum(), -8.08, 5)
        self.assertAlmostEqual(d2.getMaximum(), 8.08, 5)

        self.assertEqual(ConvertWANDSCDtoQTest_out.getNumExperimentInfo(), 1)

        ConvertWANDSCDtoQTest_out.delete()
 def hide(self):
     """
     Remove the workspace from the ADS and store it in the class instance
     """
     if mtd.doesExist(self._workspace_name):
         self._workspace = mtd[self._workspace_name]
         mtd.remove(self._workspace_name)
         self._is_in_ads = False
         self._workspace_name = ""
         self._directory_structure = ""
     else:
         # NOTE(review): this *raises* RuntimeWarning as an exception rather
         # than emitting it via warnings.warn — confirm callers expect that.
         raise RuntimeWarning(
             "Cannot remove workspace from ADS with name : {}".format(self._workspace_name))
Esempio n. 30
0
 def hide(self):
     """
     Remove the workspace from the ADS and store it in the class instance
     """
     if mtd.doesExist(self._workspace_name):
         self._workspace = mtd[self._workspace_name]
         mtd.remove(self._workspace_name)
         self._is_in_ads = False
         self._workspace_name = ""
         self._directory_structure = ""
     else:
         # NOTE(review): this *raises* RuntimeWarning as an exception rather
         # than emitting it via warnings.warn — confirm callers expect that.
         raise RuntimeWarning(
             "Cannot remove workspace from ADS with name : {}".format(
                 self._workspace_name))
Esempio n. 31
0
    def validateInputs(self):
        """Cross-check file and workspace inputs; map property name -> issue."""
        issues = dict()

        filelist = self.getProperty("Filename").value
        vanfile = self.getProperty("VanadiumFile").value
        input_ws = self.getProperty("InputWorkspaces")
        van_ws = self.getProperty("VanadiumWorkspace")
        wavelength = self.getProperty("Wavelength")

        # Exactly one of file list / input workspaces must be given.
        if filelist:
            if not input_ws.isDefault:
                issues['InputWorkspaces'] = "Cannot specify both a filename and input workspace"
        elif input_ws.isDefault:
            issues['Filename'] = "Either a file or input workspace must be specified"

        # Vanadium may come from a file OR a workspace, not both.
        if vanfile and not van_ws.isDefault:
            issues['VanadiumWorkspace'] = "Cannot specify both a vanadium file and workspace"

        # Each listed workspace must exist and be a 3-D MDHistoWorkspace.
        if not input_ws.isDefault:
            for name in (token.strip() for token in input_ws.value.split(",")):
                if not mtd.doesExist(name):
                    issues['InputWorkspaces'] = \
                        "Could not find input workspace '{}'".format(name)
                elif not isinstance(mtd[name], IMDHistoWorkspace):
                    issues['InputWorkspaces'] = \
                        "Workspace '{}' must be a MDHistoWorkspace".format(name)
                elif mtd[name].getNumDims() != 3:
                    issues['InputWorkspaces'] = \
                        "Workspace '{}' expected to have 3 dimensions".format(name)

        if not wavelength.isDefault and wavelength.value <= 0.0:
            issues['Wavelength'] = "Wavelength should be greater than zero"

        return issues
Esempio n. 32
0
def needs_loading(property_value, loading_reduction_type):
    """
    Checks whether a given unary input needs loading or is already loaded in
    ADS.
    @param property_value: the string value of the corresponding FileProperty
    @param loading_reduction_type : the reduction_type of input to load
    """
    # nothing to load when the property is empty
    if not property_value:
        return [False, '']
    # derive the workspace name from the file's base name (extension dropped)
    ws_name = path.splitext(path.basename(property_value))[0]
    if mtd.doesExist(ws_name):
        # already in the ADS; reuse it instead of loading again
        logger.notice('Reusing {0} workspace: {1}'.format(
            loading_reduction_type, ws_name))
        return [False, ws_name]
    return [True, ws_name]
    def _rebin_result(self):
        """Apply the requested rebinning to the final S(Q) workspace.

        The reference S(Q) workspace (``self._sofq``) is reused from the ADS
        if present, otherwise loaded from a processed NeXus file.  Depending
        on ``self._rebin_qrange`` the output is rebinned either onto a new
        uniform Q grid ('New') or onto the Q grid of the input S(Q) ('Snap').

        BUG FIX: the original ``logger.information`` call in the
        ``mtd.doesExist`` branch was indented with tabs mixed into a
        space-indented file, which raises TabError under Python 3.
        """
        rebin_prog = Progress(self, start=0.0, end=0.8, nreports=3)
        rebin_prog.report('Rebin result ')

        logger.information('Rebin option : ' + self._rebin_option)
        qrange = ''
        if mtd.doesExist(self._sofq):            # check if S(Q) WS exists
            logger.information('Sofq data from Workspace : %s' % self._sofq)
        else:                                    # read from nxs file
            sofq_path = FileFinder.getFullPath(self._sofq + '.nxs')
            LoadNexusProcessed(Filename=sofq_path,
                               OutputWorkspace=self._sofq,
                               EnableLogging=False)
            logger.information('Sq data from File : %s' % sofq_path)
        rebin_logs = [('rebin_option', self._rebin_option)]
        if self._rebin_option != 'None':          # rebin to be applied
            rebin_logs.append(('rebin_qrange', self._rebin_qrange))
            logger.information('Rebin qrange : %s' % self._rebin_qrange)
            if self._rebin_qrange == 'New':          # new Q range
                mtd[self._final_q].setDistribution(True)
                xs = mtd[self._final_q].readX(0)
                new_dq = float(self._rebin_qinc)       # increment in Q
                # number of points & Q max, rounded up to a whole bin
                xmax = (int(xs[len(xs) - 1] / new_dq) + 1) * new_dq
                qrange = '0.0, %f, %f' % (new_dq, xmax)   # create Q range
                self._rebin(self._final_q, self._final_q, qrange)
                # recentre X values by half a bin width after the rebin
                x = mtd[self._final_q].readX(0)
                xshift = 0.5 * (x[0] - x[1])
                self._scale_x(self._final_q, self._final_q, xshift)
                logger.information('Output S(Q) rebinned for range : %s' % qrange)
            if self._rebin_qrange == 'Snap':         # use input Q range
                gR = mtd[self._sofq].getRun()      # input S(Q) WS
                stype = gR.getLogData('input_type').value
                logger.information('Rebin option : %s' % self._rebin_option)
                if stype != 'Q':             # check input was in Q
                    raise ValueError('Input type must be Q for Snap option')
                if self._rebin_option == 'Interpolate':
                    self._rebin_ws(self._final_q, self._sofq, self._final_q)
                    logger.information('Output S(Q) interpolated to input S(Q) : %s' % self._sofq)
                if self._rebin_option == 'Spline':
                    self._spline_interp(self._sofq, self._final_q, self._final_q, '', 2)
                    logger.information('Output S(Q) spline interpolated to input S(Q) :%s ' % self._sofq)
                    rebin_logs.append(('rebin_Q_file', self._sofq))
        log_names = [item[0] for item in rebin_logs]
        log_values = [item[1] for item in rebin_logs]
        # NOTE(review): sample-log attachment is disabled, so log_names and
        # log_values are currently unused.
        # self._add_sample_log_mult(self._final_q, log_names, log_values)
        logger.information('Corrected WS created : %s' % self._final_q)
# Esempio n. 34
    def _generateGrouping(self, runnumber, metaWS, progress):
        """Return the name of the grouping workspace, creating it if needed.

        :param runnumber: run number used to load metadata when metaWS is None
        :param metaWS: optional metadata-only workspace fed to CreateGroupingWorkspace
        :param progress: Progress reporter to notify
        :return: name of the grouping workspace in the ADS
        """
        group_to_real = {'Banks': 'Group', 'Modules': 'bank', '2_4 Grouping': '2_4Grouping'}
        group = self.getProperty('GroupDetectorsBy').value
        real_name = group_to_real.get(group, group)

        if not mtd.doesExist(group):
            # use an underscore variant for the ADS workspace name
            if group == '2_4 Grouping':
                group = '2_4_Grouping'

            # explicit None check instead of truthiness on a workspace handle,
            # matching the sibling implementation of this method
            if metaWS is None:
                metaWS = self._loadMetaWS(runnumber)
            CreateGroupingWorkspace(InputWorkspace=metaWS, GroupDetectorsBy=real_name,
                                    OutputWorkspace=group)
            progress.report('create grouping')
        else:
            progress.report()

        return group
# Esempio n. 35
    def _generateGrouping(self, runnumber, metaWS, progress):
        """Return the name of the grouping workspace, creating it on demand.

        :param runnumber: run number used to load metadata when metaWS is None
        :param metaWS: optional metadata-only workspace fed to CreateGroupingWorkspace
        :param progress: Progress reporter to notify
        :return: name of the grouping workspace in the ADS
        """
        group_to_real = {'Banks': 'Group', 'Modules': 'bank', '2_4 Grouping': '2_4Grouping'}
        group = self.getProperty('GroupDetectorsBy').value
        real_name = group_to_real.get(group, group)

        # reuse an existing grouping workspace when one is already in the ADS
        if mtd.doesExist(group):
            progress.report()
            return group

        # use an underscore variant for the ADS workspace name
        if group == '2_4 Grouping':
            group = '2_4_Grouping'
        if metaWS is None:
            metaWS = self._loadMetaWS(runnumber)
        CreateGroupingWorkspace(InputWorkspace=metaWS, GroupDetectorsBy=real_name,
                                OutputWorkspace=group)
        progress.report('create grouping')
        return group
# Esempio n. 36
 def _parseStructure(self, structure):
     """Parse the structure input into a CrystalStructure or ligand list.

     Accepts: the name of an ADS workspace carrying a CrystalStructure, the
     name of a CIF file, a [charge, x, y, z] list (or a list of such lists),
     a CrystalStructure object, or a workspace handle.

     BUG FIX: the original error messages used ''%s'' which Python treats as
     implicit string concatenation, collapsing to a bare %s with no quotes;
     the workspace name is now quoted with double quotes.
     """
     from mantid.simpleapi import mtd, LoadCIF, CreateWorkspace, DeleteWorkspace
     import uuid
     self._fromCIF = False
     if isinstance(structure, string_types):
         if mtd.doesExist(structure):
             try:
                 self._cryst = self._copyCrystalStructure(mtd[structure].sample().getCrystalStructure())
                 self._getUniqueAtoms()
             except RuntimeError:
                 raise ValueError('Workspace "%s" has no valid CrystalStructure' % (structure))
         else:
             # load the CIF into a throwaway workspace with a unique name
             tmpws = CreateWorkspace(1, 1, OutputWorkspace='_tempPointCharge_'+str(uuid.uuid4())[:8])
             try:
                 LoadCIF(tmpws, structure)
                 # Attached CrystalStructure object gets destroyed when workspace is deleted
                 self._cryst = self._copyCrystalStructure(tmpws.sample().getCrystalStructure())
             except:
                 # clean up the temporary workspace, then re-raise unchanged
                 DeleteWorkspace(tmpws)
                 raise
             else:
                 DeleteWorkspace(tmpws)
                 self._getUniqueAtoms()
     elif isinstance(structure, list):
         # a single flat [charge, x, y, z] is promoted to a one-ligand list
         if (len(structure) == 4 and all([isinstance(x, (int, float)) for x in structure])):
             structure = [structure]
         if (all([isinstance(x, list) and (len(x) == 4)
                  and all([isinstance(y, (int, float)) for y in x]) for x in structure])):
             self._ligands = structure
         else:
             raise ValueError('Incorrect ligands direct input. Must be a 4-element list or a list '
                              'of 4-element list. Each ligand must be of the form [charge, x, y, z]')
     elif hasattr(structure, 'getScatterers'):
         # already a CrystalStructure-like object
         self._cryst = structure
         self._getUniqueAtoms()
     else:
         if not hasattr(structure, 'sample'):
             raise ValueError('First input must be a Mantid CrystalStructure object, workspace or string '
                              '(name of CIF file or workspace)')
         try:
             self._cryst = self._copyCrystalStructure(structure.sample().getCrystalStructure())
             self._getUniqueAtoms()
         except RuntimeError:
             raise ValueError('Workspace "%s" has no valid CrystalStructure' % (structure.name()))
# Esempio n. 37
 def _parseStructure(self, structure):
     """Parse the structure input into a CrystalStructure or ligand list.

     Accepts: the name of an ADS workspace carrying a CrystalStructure, the
     name of a CIF file, a [charge, x, y, z] list (or a list of such lists),
     a CrystalStructure object, or a workspace handle.

     BUG FIX: the original error messages used ''%s'' which Python treats as
     implicit string concatenation, collapsing to a bare %s with no quotes;
     the workspace name is now quoted with double quotes.
     """
     from mantid.simpleapi import mtd, LoadCIF, CreateWorkspace, DeleteWorkspace
     import uuid
     self._fromCIF = False
     if isinstance(structure, string_types):
         if mtd.doesExist(structure):
             try:
                 self._cryst = self._copyCrystalStructure(mtd[structure].sample().getCrystalStructure())
                 self._getUniqueAtoms()
             except RuntimeError:
                 raise ValueError('Workspace "%s" has no valid CrystalStructure' % (structure))
         else:
             # load the CIF into a throwaway workspace with a unique name
             tmpws = CreateWorkspace(1, 1, OutputWorkspace='_tempPointCharge_'+str(uuid.uuid4())[:8])
             try:
                 LoadCIF(tmpws, structure)
                 # Attached CrystalStructure object gets destroyed when workspace is deleted
                 self._cryst = self._copyCrystalStructure(tmpws.sample().getCrystalStructure())
             except:
                 # clean up the temporary workspace, then re-raise unchanged
                 DeleteWorkspace(tmpws)
                 raise
             else:
                 DeleteWorkspace(tmpws)
                 self._getUniqueAtoms()
     elif isinstance(structure, list):
         # a single flat [charge, x, y, z] is promoted to a one-ligand list
         if (len(structure) == 4 and all([isinstance(x, (int, float)) for x in structure])):
             structure = [structure]
         if (all([isinstance(x, list) and (len(x) == 4)
                  and all([isinstance(y, (int, float)) for y in x]) for x in structure])):
             self._ligands = structure
         else:
             raise ValueError('Incorrect ligands direct input. Must be a 4-element list or a list '
                              'of 4-element list. Each ligand must be of the form [charge, x, y, z]')
     elif hasattr(structure, 'getScatterers'):
         # already a CrystalStructure-like object
         self._cryst = structure
         self._getUniqueAtoms()
     else:
         if not hasattr(structure, 'sample'):
             raise ValueError('First input must be a Mantid CrystalStructure object, workspace or string '
                              '(name of CIF file or workspace)')
         try:
             self._cryst = self._copyCrystalStructure(structure.sample().getCrystalStructure())
             self._getUniqueAtoms()
         except RuntimeError:
             raise ValueError('Workspace "%s" has no valid CrystalStructure' % (structure.name()))
# Esempio n. 38
def plot_corr(tof_ws):
    """
    Plots Pearson correlation coefficient for each detector
    :param tof_ws: Workspace returned from collect_fit_result
    :return: plot figure, plot axes

    BUG FIX: the docstring promised (plot, axes) but the original function
    fell off the end and returned None; the figure and axes are now returned.
    """
    if not mtd.doesExist(str(tof_ws)):
        raise ValueError("Could not find the provided workspace in ADS")

    tof_ws = mtd[str(tof_ws)]

    numHist = tof_ws.getNumberHistograms()

    # Create an array for Pearson corr coef, NaN-filled so skipped spectra
    # simply leave gaps in the plot
    r_vals = np.empty((numHist, ), dtype=float)
    r_vals.fill(np.nan)

    # Create an array for detector IDs
    detectors = tof_ws.detectorInfo().detectorIDs()
    detID = np.empty((numHist, ), dtype=float)
    detID.fill(np.nan)

    for workspaceIndex in range(numHist):
        # Get Pearson correlation coefficient for each detector
        x = tof_ws.dataY(workspaceIndex)
        y = tof_ws.dataX(workspaceIndex)

        # corrcoef needs at least two valid (non-NaN) points
        mask = np.logical_not(np.isnan(x))
        if np.sum(mask) > 1:
            # np.corrcoef returns the 2x2 correlation matrix; unpacking gives
            # its two rows
            r, p = np.corrcoef(x[mask], y[mask])
            # Use r[1] because the corr coef is always the off-diagonal element here
            r_vals[workspaceIndex] = r[1]
        else:
            r_vals[workspaceIndex] = np.nan

        # Get detector ID for this spectrum
        detID[workspaceIndex] = detectors[workspaceIndex]

    fig, ax = plt.subplots()
    ax.set_xlabel("det IDs")
    ax.set_ylabel("Pearson correlation coefficient (TOF, d)")

    ax.plot(detID, r_vals, marker="x", linestyle="None")

    return fig, ax
# Esempio n. 39
def convertToHKL(ws,
                 OutputWorkspace='__md_hkl',
                 UB=None,
                 Append=False,
                 scale=None,
                 BinningDim0='-10.05,10.05,201',
                 BinningDim1='-10.05,10.05,201',
                 BinningDim2='-10.05,10.05,201',
                 Uproj=(1, 0, 0),
                 Vproj=(0, 1, 0),
                 Wproj=(0, 0, 1)):
    """Output MDHistoWorkspace in HKL

    Sets the UB matrix on ws, converts it to an MD event workspace in HKL
    frame, optionally scales it, and bins it onto a regular HKL grid.  With
    Append=True and an existing OutputWorkspace, the new data is accumulated
    into it via BinMD's TemporaryDataWorkspace mechanism.
    """
    SetUB(ws, UB=UB)

    ConvertToMD(ws,
                QDimensions='Q3D',
                QConversionScales='HKL',
                dEAnalysisMode='Elastic',
                Q3DFrames='HKL',
                OutputWorkspace='__temp',
                Uproj=Uproj,
                Vproj=Vproj,
                Wproj=Wproj)

    if scale is not None:
        mtd['__temp'] *= scale

    # accumulate into an existing output only when appending
    accumulate_into = (OutputWorkspace
                       if Append and mtd.doesExist(OutputWorkspace) else None)
    # pair each MD dimension name with its binning specification
    aligned = ['{},{}'.format(mtd['__temp'].getDimension(axis).name, binning)
               for axis, binning in enumerate((BinningDim0, BinningDim1, BinningDim2))]

    BinMD(InputWorkspace='__temp',
          TemporaryDataWorkspace=accumulate_into,
          OutputWorkspace=OutputWorkspace,
          AlignedDim0=aligned[0],
          AlignedDim1=aligned[1],
          AlignedDim2=aligned[2])
    DeleteWorkspace('__temp')

    return OutputWorkspace
# Esempio n. 40
def needs_processing(property_value, process_reduction_type):
    """
    Checks whether a given unary reduction needs processing or is already cached
    in ADS with expected name.
    @param property_value: the string value of the corresponding MultipleFile
                           input property
    @param process_reduction_type: the reduction_type of process
    """
    # nothing requested: no processing and no workspace name
    if not property_value:
        return [False, '']

    ws_name = get_run_number(property_value) + '_' + process_reduction_type
    if not mtd.doesExist(ws_name):
        return [True, ws_name]

    # cached candidate exists; inspect its run log to decide reusability
    cached = mtd[ws_name]
    run = cached[0].getRun() if isinstance(cached, WorkspaceGroup) else cached.getRun()

    if not run.hasProperty('ProcessedAs'):
        logger.warning('{0} workspace found, but missing the '
                       'ProcessedAs flag: {1}'.format(
                           process_reduction_type, ws_name))
        return [True, ws_name]

    if run.getLogData('ProcessedAs').value == process_reduction_type:
        logger.notice('Reusing {0} workspace: {1}'.format(
            process_reduction_type, ws_name))
        return [False, ws_name]

    logger.warning('{0} workspace found, but processed '
                   'differently: {1}'.format(
                       process_reduction_type, ws_name))
    return [True, ws_name]
# Esempio n. 41
def calc_absorption_corr_using_wksp(
    donor_wksp,
    abs_method,
    element_size=1,
    prefix_name="",
    cache_dirs=None,
):
    """
    Calculates absorption correction on the specified donor workspace. See the documentation
    for the ``calculate_absorption_correction`` function above for more details.

    :param donor_wksp: Input workspace to compute absorption correction on
    :param abs_method: Type of absorption correction: None, SampleOnly, SampleAndContainer, FullPaalmanPings
    :param element_size: Size of one side of the integration element cube in mm
    :param prefix_name: Optional prefix of the output workspaces, default is the donor_wksp name.
    :param cache_dirs: List of candidate cache directories to store cached abs workspace.
                       NOTE(review): not referenced anywhere in this function body — confirm
                       whether it should be forwarded to a caching helper.

    :return: Two workspaces (A_s, A_c), the first for the sample and the second for the container

    :raises RuntimeError: if donor_wksp is a name not found in the ADS
    :raises ValueError: if abs_method is not one of the recognized methods
    """
    # BUG FIX: was a mutable default argument (cache_dirs=[]); normalize None
    # to a fresh list per call instead
    if cache_dirs is None:
        cache_dirs = []

    if abs_method == "None":
        return "", ""

    # accept either a workspace handle or the name of one in the ADS
    if isinstance(donor_wksp, str):
        if not mtd.doesExist(donor_wksp):
            raise RuntimeError(
                "Specified donor workspace not found in the ADS")
        donor_wksp = mtd[donor_wksp]

    # output names default to the donor name unless a prefix is supplied
    absName = donor_wksp.name()
    if prefix_name != '':
        absName = prefix_name

    if abs_method == "SampleOnly":
        AbsorptionCorrection(donor_wksp,
                             OutputWorkspace=absName + '_ass',
                             ScatterFrom='Sample',
                             ElementSize=element_size)
        return absName + '_ass', ""
    elif abs_method == "SampleAndContainer":
        AbsorptionCorrection(donor_wksp,
                             OutputWorkspace=absName + '_ass',
                             ScatterFrom='Sample',
                             ElementSize=element_size)
        AbsorptionCorrection(donor_wksp,
                             OutputWorkspace=absName + '_acc',
                             ScatterFrom='Container',
                             ElementSize=element_size)
        return absName + '_ass', absName + '_acc'
    elif abs_method == "FullPaalmanPings":
        # combine the Paalman-Pings partial factors into an effective
        # container correction: A_c = A_acc * A_assc / A_acsc
        PaalmanPingsAbsorptionCorrection(donor_wksp,
                                         OutputWorkspace=absName,
                                         ElementSize=element_size)
        Multiply(LHSWorkspace=absName + '_acc',
                 RHSWorkspace=absName + '_assc',
                 OutputWorkspace=absName + '_ac')
        Divide(LHSWorkspace=absName + '_ac',
               RHSWorkspace=absName + '_acsc',
               OutputWorkspace=absName + '_ac')
        return absName + '_assc', absName + '_ac'
    else:
        raise ValueError(
            "Unrecognized absorption correction method '{}'".format(
                abs_method))
# Esempio n. 42
def __load_cached_data(cache_files, sha1, abs_method="", prefix_name=""):
    """try to load cached data from memory and disk

    :param abs_method: absorption calculation method
    :param sha1: SHA1 that identify cached workspace
    :param cache_files: list of cache file names to search
    :param prefix_name: prefix to add to wkspname for caching

    return  found_abs_wksp_sample, found_abs_wksp_container
            abs_wksp_sample, abs_wksp_container, cache_files[ 0 ]
    """
    # init
    abs_wksp_sample, abs_wksp_container = "", ""
    found_abs_wksp_sample, found_abs_wksp_container = False, False

    # step_0: depending on the abs_method, suffix will be different
    if abs_method == "SampleOnly":
        abs_wksp_sample = f"{prefix_name}_ass"
        # SampleOnly produces no container workspace, so the container is
        # treated as already "found"
        found_abs_wksp_container = True
    elif abs_method == "SampleAndContainer":
        abs_wksp_sample = f"{prefix_name}_ass"
        abs_wksp_container = f"{prefix_name}_acc"
    elif abs_method == "FullPaalmanPings":
        abs_wksp_sample = f"{prefix_name}_assc"
        abs_wksp_container = f"{prefix_name}_ac"
    else:
        raise ValueError(
            "Unrecognized absorption correction method '{}'".format(
                abs_method))

    # step_1: check memory
    # a workspace only counts as found when its stored absSHA1 log matches
    # the requested sha1 (i.e. it was computed from identical inputs)
    if mtd.doesExist(abs_wksp_sample):
        found_abs_wksp_sample = mtd[abs_wksp_sample].run(
        )["absSHA1"].value == sha1
    if mtd.doesExist(abs_wksp_container):
        found_abs_wksp_container = mtd[abs_wksp_container].run(
        )["absSHA1"].value == sha1

    # step_2: load from disk if either is not found in memory
    # only the FIRST existing candidate file is loaded (the loop breaks);
    # a Workspace2D is renamed to the sample name, a WorkspaceGroup is
    # ungrouped so its members land in the ADS under their saved names
    if (not found_abs_wksp_sample) or (not found_abs_wksp_container):
        for candidate in cache_files:
            if os.path.exists(candidate):
                wsntmp = "tmpwsg"
                Load(Filename=candidate, OutputWorkspace=wsntmp)
                wstype = mtd[wsntmp].id()
                if wstype == "Workspace2D":
                    RenameWorkspace(InputWorkspace=wsntmp,
                                    OutputWorkspace=abs_wksp_sample)
                elif wstype == "WorkspaceGroup":
                    UnGroupWorkspace(InputWorkspace=wsntmp)
                else:
                    raise ValueError(
                        f"Unsupported cached workspace type: {wstype}")
                break

    # step_3: check memory again
    # re-run the SHA1 comparison now that the disk cache may have populated
    # the ADS
    if mtd.doesExist(abs_wksp_sample):
        found_abs_wksp_sample = mtd[abs_wksp_sample].run(
        )["absSHA1"].value == sha1
    if mtd.doesExist(abs_wksp_container):
        found_abs_wksp_container = mtd[abs_wksp_container].run(
        )["absSHA1"].value == sha1

    # NOTE(review): cache_files[0] raises IndexError when cache_files is
    # empty — confirm callers always pass at least one candidate path
    return found_abs_wksp_sample, found_abs_wksp_container, abs_wksp_sample, abs_wksp_container, cache_files[
        0]
# Esempio n. 43
    def PyExec(self):
        """Reduce the input workspace to a 1D pattern on the requested axis.

        Pipeline: build a combined detector mask, convert the spectrum axis
        to the requested target, resample onto a common grid, then optionally
        divide by a calibration (vanadium) workspace and subtract a scaled
        background.  The result is written to OutputWorkspace.
        """
        # --- gather user inputs ---
        data = self.getProperty("InputWorkspace").value
        cal = self.getProperty("CalibrationWorkspace").value
        bkg = self.getProperty("BackgroundWorkspace").value
        mask = self.getProperty("MaskWorkspace").value
        target = self.getProperty("Target").value
        eFixed = self.getProperty("EFixed").value
        xMin = self.getProperty("XMin").value
        xMax = self.getProperty("XMax").value
        numberBins = self.getProperty("NumberBins").value
        normaliseBy = self.getProperty("NormaliseBy").value
        maskAngle = self.getProperty("MaskAngle").value
        outWS = self.getPropertyValue("OutputWorkspace")

        # normalisation divisors; stay at 1 when no normalisation is chosen
        data_scale = 1
        cal_scale = 1
        bkg_scale = 1

        if normaliseBy == "Monitor":
            data_scale = data.run().getProtonCharge()
        elif normaliseBy == "Time":
            data_scale = data.run().getLogData('duration').value

        # --- build the combined mask: existing detector mask, optional
        # Phi-angle mask, and optional user mask workspace (OR-ed in) ---
        ExtractMask(data, OutputWorkspace='__mask_tmp', EnableLogging=False)

        if maskAngle != Property.EMPTY_DBL:
            MaskAngle(Workspace='__mask_tmp', MinAngle=maskAngle, Angle='Phi', EnableLogging=False)

        if mask is not None:
            BinaryOperateMasks(InputWorkspace1='__mask_tmp', InputWorkspace2=mask,
                               OperationType='OR', OutputWorkspace='__mask_tmp', EnableLogging=False)

        # --- convert the data to the target axis and resample ---
        ExtractUnmaskedSpectra(InputWorkspace=data, MaskWorkspace='__mask_tmp', OutputWorkspace='__data_tmp', EnableLogging=False)
        ConvertSpectrumAxis(InputWorkspace='__data_tmp', Target=target, EFixed=eFixed, OutputWorkspace=outWS, EnableLogging=False)
        Transpose(InputWorkspace=outWS, OutputWorkspace=outWS, EnableLogging=False)
        ResampleX(InputWorkspace=outWS, OutputWorkspace=outWS, XMin=xMin, XMax=xMax, NumberBins=numberBins, EnableLogging=False)

        # --- optional calibration division (same mask/axis/grid treatment) ---
        if cal is not None:
            ExtractUnmaskedSpectra(InputWorkspace=cal, MaskWorkspace='__mask_tmp', OutputWorkspace='__cal_tmp', EnableLogging=False)
            CopyInstrumentParameters(data, '__cal_tmp', EnableLogging=False)
            ConvertSpectrumAxis(InputWorkspace='__cal_tmp', Target=target, EFixed=eFixed, OutputWorkspace='__cal_tmp', EnableLogging=False)
            Transpose(InputWorkspace='__cal_tmp', OutputWorkspace='__cal_tmp', EnableLogging=False)
            ResampleX(InputWorkspace='__cal_tmp', OutputWorkspace='__cal_tmp', XMin=xMin, XMax=xMax, NumberBins=numberBins,
                      EnableLogging=False)
            Divide(LHSWorkspace=outWS, RHSWorkspace='__cal_tmp', OutputWorkspace=outWS, EnableLogging=False)
            if normaliseBy == "Monitor":
                cal_scale = cal.run().getProtonCharge()
            elif normaliseBy == "Time":
                cal_scale = cal.run().getLogData('duration').value

        # single scale combining data and calibration normalisation factors
        Scale(InputWorkspace=outWS, OutputWorkspace=outWS, Factor=cal_scale/data_scale, EnableLogging=False)

        # --- optional background subtraction (processed like the data,
        # scaled to the same normalisation, then subtracted) ---
        if bkg is not None:
            ExtractUnmaskedSpectra(InputWorkspace=bkg, MaskWorkspace='__mask_tmp', OutputWorkspace='__bkg_tmp', EnableLogging=False)
            CopyInstrumentParameters(data, '__bkg_tmp', EnableLogging=False)
            ConvertSpectrumAxis(InputWorkspace='__bkg_tmp', Target=target, EFixed=eFixed, OutputWorkspace='__bkg_tmp', EnableLogging=False)
            Transpose(InputWorkspace='__bkg_tmp', OutputWorkspace='__bkg_tmp', EnableLogging=False)
            ResampleX(InputWorkspace='__bkg_tmp', OutputWorkspace='__bkg_tmp', XMin=xMin, XMax=xMax, NumberBins=numberBins,
                      EnableLogging=False)
            if cal is not None:
                Divide(LHSWorkspace='__bkg_tmp', RHSWorkspace='__cal_tmp', OutputWorkspace='__bkg_tmp', EnableLogging=False)
            if normaliseBy == "Monitor":
                bkg_scale = bkg.run().getProtonCharge()
            elif normaliseBy == "Time":
                bkg_scale = bkg.run().getLogData('duration').value
            Scale(InputWorkspace='__bkg_tmp', OutputWorkspace='__bkg_tmp', Factor=cal_scale/bkg_scale, EnableLogging=False)
            Scale(InputWorkspace='__bkg_tmp', OutputWorkspace='__bkg_tmp',
                  Factor=self.getProperty('BackgroundScale').value, EnableLogging=False)
            Minus(LHSWorkspace=outWS, RHSWorkspace='__bkg_tmp', OutputWorkspace=outWS, EnableLogging=False)

        self.setProperty("OutputWorkspace", outWS)

        # remove temp workspaces
        # NOTE(review): self.temp_workspace_list is presumably populated
        # elsewhere in this class (e.g. PyInit) — confirm it lists the
        # __*_tmp names used above
        [DeleteWorkspace(ws, EnableLogging=False) for ws in self.temp_workspace_list if mtd.doesExist(ws)]
# Esempio n. 44
    def PyExec(self):
        """Reduce one or more SNAP runs: load, align-and-focus, convert to
        d-spacing, simplify the instrument geometry, normalise, and save.

        Each run produces a '<tag>_<run>_<group>_red' workspace and, when a
        normalization is configured, a matching '_nor' workspace; outputs are
        declared as dynamic output properties so they retain history.
        """
        in_Runs = self.getProperty("RunNumbers").value
        maskWSname = self._getMaskWSname()
        progress = Progress(self, 0., .25, 3)

        # default arguments for AlignAndFocusPowder
        alignAndFocusArgs = {'TMax': 50000,
                             'RemovePromptPulseWidth': 1600,
                             'PreserveEvents': False,
                             'Dspacing': True,  # binning parameters in d-space
                             'Params': self.getProperty("Binning").value}

        # workspace for loading metadata only to be used in LoadDiffCal and
        # CreateGroupingWorkspace
        metaWS = None

        # either type of file-based calibration is stored in the same variable
        calib = self.getProperty("Calibration").value
        detcalFile = None
        if calib == "Calibration File":
            metaWS = self._loadMetaWS(in_Runs[0])
            LoadDiffCal(Filename=self.getPropertyValue("CalibrationFilename"),
                        WorkspaceName='SNAP',
                        InputWorkspace=metaWS,
                        MakeGroupingWorkspace=False, MakeMaskWorkspace=False)
            alignAndFocusArgs['CalibrationWorkspace'] = 'SNAP_cal'
        elif calib == 'DetCal File':
            detcalFile = ','.join(self.getProperty('DetCalFilename').value)
        progress.report('loaded calibration')

        # --- set up normalization workspace (file, ADS workspace, or none) ---
        norm = self.getProperty("Normalization").value

        if norm == "From Processed Nexus":
            norm_File = self.getProperty("NormalizationFilename").value
            normalizationWS = 'normWS'
            LoadNexusProcessed(Filename=norm_File, OutputWorkspace=normalizationWS)
            progress.report('loaded normalization')
        elif norm == "From Workspace":
            normalizationWS = str(self.getProperty("NormalizationWorkspace").value)
            progress.report('')
        else:
            normalizationWS = None
            progress.report('')

        group = self._generateGrouping(in_Runs[0], metaWS, progress)

        # metadata workspace is only needed for calibration/grouping setup
        if metaWS is not None:
            DeleteWorkspace(Workspace=metaWS)

        Process_Mode = self.getProperty("ProcessingMode").value

        prefix = self.getProperty("OptionalPrefix").value

        # --------------------------- REDUCE DATA -----------------------------

        Tag = 'SNAP'
        if self.getProperty("LiveData").value:
            Tag = 'Live'

        # remaining 75% of the progress bar is split evenly across runs
        progStart = .25
        progDelta = (1.-progStart)/len(in_Runs)
        for i, runnumber in enumerate(in_Runs):
            self.log().notice("processing run %s" % runnumber)
            self.log().information(str(self.get_IPTS_Local(runnumber)))

            # put together output names
            new_Tag = Tag
            if len(prefix) > 0:
                new_Tag += '_' + prefix
            basename = '%s_%s_%s' % (new_Tag, runnumber, group)

            if self.getProperty("LiveData").value:
                raise RuntimeError('Live data is not currently supported')
            else:
                Load(Filename='SNAP' + str(runnumber), OutputWorkspace=basename + '_red', startProgress=progStart,
                     endProgress=progStart + .25 * progDelta)
                progStart += .25 * progDelta
            redWS = basename + '_red'

            # overwrite geometry with detcal files
            if calib == 'DetCal File':
                LoadIsawDetCal(InputWorkspace=redWS, Filename=detcalFile)

            # create unfocussed data if in set-up mode
            if Process_Mode == "Set-Up":
                unfocussedWksp = '{}_{}_d'.format(new_Tag, runnumber)
            else:
                unfocussedWksp = ''

            AlignAndFocusPowder(InputWorkspace=redWS, OutputWorkspace=redWS,
                                MaskWorkspace=maskWSname,  # can be empty string
                                GroupingWorkspace=group,
                                UnfocussedWorkspace=unfocussedWksp,  # can be empty string
                                startProgress=progStart,
                                endProgress=progStart + .5 * progDelta,
                                **alignAndFocusArgs)
            progStart += .5 * progDelta

            # the rest takes up .25 percent of the run processing
            progress = Progress(self, progStart, progStart+.25*progDelta, 2)

            # AlignAndFocusPowder leaves the data in time-of-flight
            ConvertUnits(InputWorkspace=redWS, OutputWorkspace=redWS, Target='dSpacing', EMode='Elastic')

            # Edit instrument geometry to make final workspace smaller on disk
            # NOTE(review): 'Inputworkspace' / 'Outputworkspace' casing below
            # differs from the usual 'InputWorkspace' — confirm the algorithm
            # wrapper accepts these spellings before normalising them
            det_table = PreprocessDetectorsToMD(Inputworkspace=redWS,
                                                OutputWorkspace='__SNAP_det_table')
            polar = np.degrees(det_table.column('TwoTheta'))
            azi = np.degrees(det_table.column('Azimuthal'))
            EditInstrumentGeometry(Workspace=redWS, L2=det_table.column('L2'),
                                   Polar=polar, Azimuthal=azi)
            mtd.remove('__SNAP_det_table')
            progress.report('simplify geometry')

            # AlignAndFocus doesn't necessarily rebin the data correctly
            if Process_Mode == "Set-Up":
                Rebin(InputWorkspace=unfocussedWksp, Params=alignAndFocusArgs['Params'],
                      Outputworkspace=unfocussedWksp)

            NormaliseByCurrent(InputWorkspace=redWS, OutputWorkspace=redWS)

            # normalize the data as requested
            normalizationWS = self._generateNormalization(redWS, norm, normalizationWS)
            normalizedWS = None
            if normalizationWS is not None:
                normalizedWS = basename + '_nor'
                Divide(LHSWorkspace=redWS, RHSWorkspace=normalizationWS,
                       OutputWorkspace=normalizedWS)
                ReplaceSpecialValues(Inputworkspace=normalizedWS,
                                     OutputWorkspace=normalizedWS,
                                     NaNValue='0', NaNError='0',
                                     InfinityValue='0', InfinityError='0')
                progress.report('normalized')
            else:
                progress.report()

            # rename everything as appropriate and determine output workspace name
            if normalizedWS is None:
                outputWksp = redWS
            else:
                outputWksp = normalizedWS

                # in production, intermediate workspaces are discarded when the
                # normalizer was extracted from the data itself
                if norm == "Extracted from Data" and Process_Mode == "Production":
                        DeleteWorkspace(Workspace=redWS)
                        DeleteWorkspace(Workspace=normalizationWS)

            # Save requested formats
            saveDir = self.getPropertyValue("OutputDirectory").strip()
            if len(saveDir) <= 0:
                self.log().notice('Using default save location')
                saveDir = os.path.join(self.get_IPTS_Local(runnumber), 'shared', 'data')
            self._save(saveDir, basename, outputWksp)

            # set workspace as an output so it gets history
            propertyName = 'OutputWorkspace_'+str(outputWksp)
            self.declareProperty(WorkspaceProperty(
                propertyName, outputWksp, Direction.Output))
            self.setProperty(propertyName, outputWksp)

            # declare some things as extra outputs in set-up
            if Process_Mode != "Production":
                # NOTE(review): 'OuputWorkspace' looks like a typo for
                # 'OutputWorkspace', and this rebinds the 'prefix' variable
                # read from OptionalPrefix above — the property name is part
                # of the public interface, so confirm before changing
                prefix = 'OuputWorkspace_{:d}_'.format(i)
                propNames = [prefix + it for it in ['d', 'norm', 'normalizer']]
                wkspNames = ['%s_%s_d' % (new_Tag, runnumber),
                             basename + '_red',
                             '%s_%s_normalizer' % (new_Tag, runnumber)]
                for (propName, wkspName) in zip(propNames, wkspNames):
                    if mtd.doesExist(wkspName):
                        self.declareProperty(WorkspaceProperty(propName,
                                                               wkspName,
                                                               Direction.Output))
                        self.setProperty(propName, wkspName)
# Esempio n. 45
    def PyExec(self):
        """Reduce the requested SNAP runs.

        Per run: load the data (from file or live stream), normalise by
        proton charge, compress events, crop the TOF range, remove the
        prompt pulse, optionally mask, align and focus, divide by an
        optional normalization workspace, collapse the instrument to an
        effective geometry, save to disk, and expose the result through a
        dynamically declared output property.
        """
        # Retrieve all relevant properties
        in_Runs = self.getProperty("RunNumbers").value

        maskWSname = self._getMaskWSname()

        # either type of file-based calibration is stored in the same variable
        calib = self.getProperty("Calibration").value
        if calib == "Calibration File":
            cal_File = self.getProperty("CalibrationFilename").value
        elif calib == 'DetCal File':
            cal_File = self.getProperty('DetCalFilename').value
            # multiple DetCal files arrive as a list; downstream code expects
            # a single comma-separated string
            cal_File = ','.join(cal_File)
        else:
            cal_File = None

        params = self.getProperty("Binning").value
        norm = self.getProperty("Normalization").value

        # resolve the normalization workspace: load from file, take an
        # existing workspace by name, or none
        if norm == "From Processed Nexus":
            norm_File = self.getProperty("NormalizationFilename").value
            LoadNexusProcessed(Filename=norm_File, OutputWorkspace='normWS')
            normWS = 'normWS'
        elif norm == "From Workspace":
            normWS = str(self.getProperty("NormalizationWorkspace").value)
        else:
            normWS = None

        # map the user-facing grouping choice onto the name that
        # CreateGroupingWorkspace understands
        group_to_real = {
            'Banks': 'Group',
            'Modules': 'bank',
            '2_4 Grouping': '2_4Grouping'
        }
        group = self.getProperty('GroupDetectorsBy').value
        real_name = group_to_real.get(group, group)

        # build the grouping workspace once if it is not already present
        if not mtd.doesExist(group):
            if group == '2_4 Grouping':
                # workspace name must not contain spaces
                group = '2_4_Grouping'
            CreateGroupingWorkspace(InstrumentName='SNAP',
                                    GroupDetectorsBy=real_name,
                                    OutputWorkspace=group)

        Process_Mode = self.getProperty("ProcessingMode").value

        prefix = self.getProperty("OptionalPrefix").value

        # --------------------------- REDUCE DATA -----------------------------

        Tag = 'SNAP'
        for r in in_Runs:
            self.log().notice("processing run %s" % r)
            self.log().information(str(self.get_IPTS_Local(r)))
            if self.getProperty("LiveData").value:
                Tag = 'Live'
                LoadPreNexusLive(Instrument='SNAP', OutputWorkspace='WS')
            else:
                Load(Filename='SNAP' + str(r), OutputWorkspace='WS')
                NormaliseByCurrent(InputWorkspace='WS', OutputWorkspace='WS')

            CompressEvents(InputWorkspace='WS', OutputWorkspace='WS')
            CropWorkspace(InputWorkspace='WS',
                          OutputWorkspace='WS',
                          XMax=50000)
            RemovePromptPulse(InputWorkspace='WS',
                              OutputWorkspace='WS',
                              Width='1600',
                              Frequency='60.4')

            if maskWSname is not None:
                MaskDetectors(Workspace='WS', MaskedWorkspace=maskWSname)

            # produces 'WS_d' and 'WS_red' (names relied on below)
            self._alignAndFocus(params, calib, cal_File, group)

            # may create/replace the normalization workspace (e.g. when it is
            # extracted from the data itself)
            normWS = self._generateNormalization('WS_red', norm, normWS)
            WS_nor = None
            if normWS is not None:
                WS_nor = 'WS_nor'
                Divide(LHSWorkspace='WS_red',
                       RHSWorkspace=normWS,
                       OutputWorkspace='WS_nor')
                # zero out NaN/inf produced by the division
                # NOTE(review): keyword is 'Inputworkspace' (lower-case 'w')
                # throughout this file - confirm Mantid accepts this casing
                ReplaceSpecialValues(Inputworkspace='WS_nor',
                                     OutputWorkspace='WS_nor',
                                     NaNValue='0',
                                     NaNError='0',
                                     InfinityValue='0',
                                     InfinityError='0')

            new_Tag = Tag
            if len(prefix) > 0:
                new_Tag += '_' + prefix

            # Edit instrument geometry to make final workspace smaller on disk
            det_table = PreprocessDetectorsToMD(
                Inputworkspace='WS_red', OutputWorkspace='__SNAP_det_table')
            polar = np.degrees(det_table.column('TwoTheta'))
            azi = np.degrees(det_table.column('Azimuthal'))
            EditInstrumentGeometry(Workspace='WS_red',
                                   L2=det_table.column('L2'),
                                   Polar=polar,
                                   Azimuthal=azi)
            if WS_nor is not None:
                EditInstrumentGeometry(Workspace='WS_nor',
                                       L2=det_table.column('L2'),
                                       Polar=polar,
                                       Azimuthal=azi)
            mtd.remove('__SNAP_det_table')

            # Save requested formats
            basename = '%s_%s_%s' % (new_Tag, r, group)
            self._save(r, basename, norm)

            # temporary workspace no longer needed
            DeleteWorkspace(Workspace='WS')

            # rename everything as appropriate and determine output workspace name
            RenameWorkspace(Inputworkspace='WS_d',
                            OutputWorkspace='%s_%s_d' % (new_Tag, r))
            RenameWorkspace(Inputworkspace='WS_red',
                            OutputWorkspace=basename + '_red')
            if norm == 'None':
                outputWksp = basename + '_red'
            else:
                outputWksp = basename + '_nor'
                RenameWorkspace(Inputworkspace='WS_nor',
                                OutputWorkspace=basename + '_nor')
            if norm == "Extracted from Data":
                RenameWorkspace(Inputworkspace='peak_clip_WS',
                                OutputWorkspace='%s_%s_normalizer' %
                                (new_Tag, r))

            # delete intermediate workspaces in production mode
            if Process_Mode == "Production":
                DeleteWorkspace(Workspace='%s_%s_d' %
                                (new_Tag, r))  # was 'WS_d'

                if norm != "None":
                    DeleteWorkspace(Workspace=basename +
                                    '_red')  # was 'WS_red'

                if norm == "Extracted from Data":
                    DeleteWorkspace(Workspace='%s_%s_normalizer' %
                                    (new_Tag, r))  # was 'peak_clip_WS'

            # declare a per-run output property so the workspace gets history
            propertyName = 'OutputWorkspace_' + str(outputWksp)
            self.declareProperty(
                WorkspaceProperty(propertyName, outputWksp, Direction.Output))
            self.setProperty(propertyName, outputWksp)
# Esempio n. 46
    def PyExec(self):
        """Reduce the requested SNAP runs (workspace-handle variant).

        Same pipeline as the string-name variant: load, normalise by
        current, compress, crop, remove the prompt pulse, optionally mask,
        convert/align to d-spacing, rebin, focus, optionally divide by a
        normalization workspace, collapse the instrument geometry, save,
        rename, and set the OutputWorkspace property.
        """
        # Retrieve all relevant properties
        in_Runs = self.getProperty("RunNumbers").value

        maskWSname = self._getMaskWSname()

        calib = self.getProperty("Calibration").value
        if calib == "Calibration File":
            cal_File = self.getProperty("CalibrationFilename").value
        # NOTE(review): cal_File is never initialised for other Calibration
        # values; the "Convert Units" branch below avoids it, but any other
        # value reaches AlignDetectors and would raise NameError - verify

        params = self.getProperty("Binning").value
        norm = self.getProperty("Normalization").value

        # resolve the normalization workspace: load from file, take the
        # workspace property value, or none
        if norm == "From Processed Nexus":
            norm_File = self.getProperty("Normalization filename").value
            normWS = LoadNexusProcessed(Filename=norm_File)
        elif norm == "From Workspace":
            normWS = self.getProperty("NormalizationWorkspace").value
        else:
            normWS = None

        # map the user-facing grouping choice onto the algorithm-level name
        group_to_real = {'Banks':'Group', 'Modules':'bank', '2_4 Grouping':'2_4_Grouping'}
        group = self.getProperty("GroupDetectorsBy").value
        real_name = group_to_real.get(group, group)

        # build the grouping workspace once if it is not already present
        if not mtd.doesExist(group):
            if group == "2_4 Grouping":
                group = real_name
                # the 2/4 grouping comes from a shared XML file on disk
                LoadDetectorsGroupingFile(InputFile=r'/SNS/SNAP/shared/libs/SNAP_group_2_4.xml',
                                          OutputWorkspace=group)
            else:
                CreateGroupingWorkspace(InstrumentName='SNAP', GroupDetectorsBy=real_name,
                                        OutputWorkspace=group)

        Process_Mode = self.getProperty("ProcessingMode").value

        prefix = self.getProperty("OptionalPrefix").value

        # --------------------------- REDUCE DATA -----------------------------

        Tag = 'SNAP'
        for r in in_Runs:
            self.log().notice("processing run %s" % r)
            self.log().information(str(self.get_IPTS_Local(r)))
            if self.getProperty("LiveData").value:
                Tag = 'Live'
                WS = LoadPreNexusLive(Instrument='SNAP')
            else:
                WS = Load(Filename='SNAP' + str(r), Outputworkspace='WS')
                WS = NormaliseByCurrent(InputWorkspace=WS,
                                        Outputworkspace='WS')

            WS = CompressEvents(InputWorkspace=WS, Outputworkspace='WS')
            WS = CropWorkspace(InputWorkspace='WS',
                               OutputWorkspace='WS', XMax=50000)
            WS = RemovePromptPulse(InputWorkspace=WS, OutputWorkspace='WS',
                                   Width='1600', Frequency='60.4')

            if maskWSname is not None:
                WS = MaskDetectors(Workspace=WS, MaskedWorkspace=maskWSname)

            # convert to d-spacing, either by plain unit conversion or via a
            # calibration file
            if calib == "Convert Units":
                WS_d = ConvertUnits(InputWorkspace='WS',
                                    Target='dSpacing', Outputworkspace='WS_d')
            else:
                self.log().notice("\n calibration file : %s" % cal_File)
                WS_d = AlignDetectors(
                    InputWorkspace='WS', CalibrationFile=cal_File, Outputworkspace='WS_d')

            WS_d = Rebin(InputWorkspace=WS_d, Params=params,
                         Outputworkspace='WS_d')

            WS_red = DiffractionFocussing(InputWorkspace=WS_d, GroupingWorkspace=group,
                                          PreserveEvents=False)

            # may create/replace the normalization workspace (e.g. when it is
            # extracted from the data itself)
            normWS = self._generateNormalization(WS_red, norm, normWS)
            WS_nor = None
            if normWS is not None:
                WS_nor = Divide(LHSWorkspace=WS_red, RHSWorkspace=normWS)
                # zero out NaN/inf produced by the division
                WS_nor = ReplaceSpecialValues(Inputworkspace=WS_nor,
                                              NaNValue='0', NaNError='0',
                                              InfinityValue='0', InfinityError='0')

            new_Tag = Tag
            if len(prefix) > 0:
                new_Tag += '_' + prefix

            # Edit instrument geometry to make final workspace smaller on disk
            det_table = PreprocessDetectorsToMD(Inputworkspace='WS_red',
                                                OutputWorkspace='__SNAP_det_table')
            polar = np.degrees(det_table.column('TwoTheta'))
            azi = np.degrees(det_table.column('Azimuthal'))
            EditInstrumentGeometry(Workspace="WS_red", L2=det_table.column('L2'),
                                   Polar=polar, Azimuthal=azi)
            if WS_nor is not None:
                EditInstrumentGeometry(Workspace="WS_nor", L2=det_table.column('L2'),
                                       Polar=polar, Azimuthal=azi)
            mtd.remove('__SNAP_det_table')

            # Save requested formats
            basename = '%s_%s_%s' % (new_Tag, r, group)
            self._save(r, basename, norm)

            # temporary workspace no longer needed
            DeleteWorkspace(Workspace='WS')

            # rename everything as appropriate and determine output workspace name
            RenameWorkspace(Inputworkspace='WS_d',
                            OutputWorkspace='%s_%s_d' % (new_Tag, r))
            RenameWorkspace(Inputworkspace='WS_red',
                            OutputWorkspace=basename + '_red')
            if norm == 'None':
                outputWksp = basename + '_red'
            else:
                outputWksp = basename + '_nor'
                RenameWorkspace(Inputworkspace='WS_nor',
                                OutputWorkspace=basename + '_nor')
            if norm == "Extracted from Data":
                RenameWorkspace(Inputworkspace='peak_clip_WS',
                                OutputWorkspace='%s_%s_normalizer' % (new_Tag, r))

            # delete intermediate workspaces in production mode
            if Process_Mode == "Production":
                DeleteWorkspace(Workspace='%s_%s_d' % (new_Tag, r)) # was 'WS_d'

                if norm != "None":
                    DeleteWorkspace(Workspace=basename + '_red') # was 'WS_red'

                if norm == "Extracted from Data":
                    DeleteWorkspace(Workspace='%s_%s_normalizer' % (new_Tag, r)) # was 'peak_clip_WS'

            # NOTE(review): the same 'OutputWorkspace' property is declared on
            # every loop iteration - re-declaring an existing property likely
            # fails when RunNumbers has more than one entry; verify
            propertyName = 'OutputWorkspace'
            self.declareProperty(WorkspaceProperty(
                propertyName, outputWksp, Direction.Output))
            self.setProperty(propertyName, outputWksp)