Example #1
    def _save(self, runnumber, basename, norm):
        if not self.getProperty("SaveData").value:
            return

        saveDir = self.getProperty("OutputDirectory").value.strip()
        if len(saveDir) <= 0:
            self.log().notice('Using default save location')
            saveDir = os.path.join(
                self.get_IPTS_Local(runnumber), 'shared', 'data')
        self.log().notice('Writing to \'' + saveDir + '\'')

        if norm == 'None':
            SaveNexusProcessed(InputWorkspace='WS_red',
                               Filename=os.path.join(saveDir, 'nexus', basename + '.nxs'))
            SaveAscii(InputWorkspace='WS_red',
                      Filename=os.path.join(saveDir, 'd_spacing', basename + '.dat'))
            ConvertUnits(InputWorkspace='WS_red', OutputWorkspace='WS_tof',
                         Target="TOF", AlignBins=False)
        else:
            SaveNexusProcessed(InputWorkspace='WS_nor',
                               Filename=os.path.join(saveDir, 'nexus', basename + '.nxs'))
            SaveAscii(InputWorkspace='WS_nor',
                      Filename=os.path.join(saveDir, 'd_spacing', basename + '.dat'))
            ConvertUnits(InputWorkspace='WS_nor', OutputWorkspace='WS_tof',
                         Target="TOF", AlignBins=False)

        SaveGSS(InputWorkspace='WS_tof',
                Filename=os.path.join(saveDir, 'gsas', basename + '.gsa'),
                Format='SLOG', SplitFiles=False, Append=False, ExtendedHeader=True)
        SaveFocusedXYE(InputWorkspace='WS_tof',
                       Filename=os.path.join(
                           saveDir, 'fullprof', basename + '.dat'),
                       SplitFiles=True, Append=False)
        DeleteWorkspace(Workspace='WS_tof')
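The _save helper above writes into nexus/, d_spacing/, gsas/ and fullprof/ subfolders of saveDir; Mantid's save algorithms generally expect those folders to exist already. A minimal sketch of preparing that output tree (ensure_save_tree is an illustrative name, not part of the original code):

import os


def ensure_save_tree(saveDir):
    # create the per-format subdirectories used by _save, if missing
    for subdir in ('nexus', 'd_spacing', 'gsas', 'fullprof'):
        os.makedirs(os.path.join(saveDir, subdir), exist_ok=True)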
Example #2
    def runTest(self):
        water = '3659, 3663, 3667'
        sample = "3674, 3677, 3680"
        transmission_sample = '3671'
        beam = '3587'
        transmission_water = '3655'
        transmission_water_cell = '3592'
        transmission_beam = '3587'
        absorber = '3598, 3604, 3654'
        empty_cell_water = '3618, 3623, 3646'
        cell_background = '3676, 3679, 3682'
        transmission_empty_cell = '3673'

        # first process the water
        SANSILLAutoProcess(
            SampleRuns=water,
            BeamRuns=beam,
            DefaultMaskFile="side_mask.nxs",
            MaskFiles="beam_mask.nxs, side_mask.nxs, side_mask.nxs",
            TransmissionBeamRuns=transmission_beam,
            SampleTransmissionRuns=transmission_water,
            ContainerTransmissionRuns=transmission_water_cell,
            OutputWorkspace='water',
            TransmissionBeamRadius=1,
            BeamRadius=1,
            ContainerRuns=empty_cell_water,
            ThetaDependent=False,
            WaterCrossSection=0.87,
            SampleThickness=0.2,
            AbsorberRuns=absorber,
            ClearCorrected2DWorkspace=False)
        tmp_dir = gettempdir()
        water_dir = [
            os.path.join(tmp_dir, 'water_reference_g' + str(i) + '.nxs')
            for i in range(3)
        ]
        SaveNexusProcessed('003659_Sample', water_dir[0])
        SaveNexusProcessed('003663_Sample', water_dir[1])
        SaveNexusProcessed('003667_Sample', water_dir[2])

        # then process the sample
        SANSILLAutoProcess(SampleRuns=sample,
                           BeamRuns=beam,
                           DefaultMaskFile="side_mask",
                           MaskFiles="beam_mask, side_mask, side_mask",
                           TransmissionBeamRuns=transmission_beam,
                           OutputWorkspace='iq',
                           ContainerTransmissionRuns=transmission_empty_cell,
                           SampleTransmissionRuns=transmission_sample,
                           ContainerRuns=cell_background,
                           AbsorberRuns=absorber,
                           ThetaDependent=False,
                           WaterCrossSection=0.87,
                           SampleThickness=0.2,
                           TransmissionBeamRadius=1,
                           BeamRadius=1,
                           ReferenceFiles=",".join(water_dir),
                           StitchReferenceIndex=0)
Example #3
    def runTest(self):
        beams = '947,1088'
        containers = '973,1003'
        container_tr = '988'
        beam_tr = '1119'
        samples = '975,1005'
        sample_tr = '990'
        solvents = '1106,1091'
        solvent_tr = '1121'
        thick = 0.1

        # this also tests that an already-loaded workspace can be passed instead of a file
        LoadNexusProcessed(Filename='sens-lamp.nxs',
                           OutputWorkspace='sens-lamp')
        # first, process the solvent
        SANSILLAutoProcess(SampleRuns=solvents,
                           BeamRuns=beams,
                           DefaultMaskFile='edge_masks',
                           MaskFiles='mask_8m_4_6A,mask_1m_4_6A',
                           SensitivityMaps='sens-lamp',
                           SampleTransmissionRuns=solvent_tr,
                           ContainerTransmissionRuns=container_tr,
                           TransmissionBeamRuns=beam_tr,
                           SampleThickness=thick,
                           CalculateResolution='MildnerCarpenter',
                           OutputWorkspace='solvents',
                           BeamRadius='0.05',
                           TransmissionBeamRadius=0.05,
                           ClearCorrected2DWorkspace=False,
                           StitchReferenceIndex=0)

        tmp_dir = gettempdir()
        solvent_dir = [
            os.path.join(tmp_dir, 'solvent_' + str(i) + '.nxs')
            for i in range(2)
        ]
        SaveNexusProcessed('001106_Sample', solvent_dir[0])
        SaveNexusProcessed('001091_Sample', solvent_dir[1])

        # reduce samples
        SANSILLAutoProcess(SampleRuns=samples,
                           BeamRuns=beams,
                           ContainerRuns=containers,
                           DefaultMaskFile='edge_masks',
                           MaskFiles='mask_8m_4_6A,mask_1m_4_6A',
                           SensitivityMaps='sens-lamp',
                           SampleTransmissionRuns=sample_tr,
                           ContainerTransmissionRuns=container_tr,
                           TransmissionBeamRuns=beam_tr,
                           SolventFiles=",".join(solvent_dir),
                           SampleThickness=thick,
                           CalculateResolution='MildnerCarpenter',
                           OutputWorkspace='iq_mult_solvent',
                           BeamRadius='0.05',
                           TransmissionBeamRadius=0.05,
                           StitchReferenceIndex=0)
Example #4
    def _save(self, saveDir, basename, outputWksp):
        if not self.getProperty("SaveData").value:
            return

        self.log().notice('Writing to \'' + saveDir + '\'')

        SaveNexusProcessed(InputWorkspace=outputWksp,
                           Filename=os.path.join(saveDir, 'nexus',
                                                 basename + '.nxs'))
        SaveAscii(InputWorkspace=outputWksp,
                  Filename=os.path.join(saveDir, 'd_spacing',
                                        basename + '.dat'))
        ConvertUnits(InputWorkspace=outputWksp,
                     OutputWorkspace='WS_tof',
                     Target="TOF",
                     AlignBins=False)

        # GSAS and FullProf require data in time-of-flight
        SaveGSS(InputWorkspace='WS_tof',
                Filename=os.path.join(saveDir, 'gsas', basename + '.gsa'),
                Format='SLOG',
                SplitFiles=False,
                Append=False,
                ExtendedHeader=True)
        SaveFocusedXYE(InputWorkspace='WS_tof',
                       Filename=os.path.join(saveDir, 'fullprof',
                                             basename + '.dat'),
                       SplitFiles=True,
                       Append=False)
        DeleteWorkspace(Workspace='WS_tof')
Example #5
    def save_workspaces(self, workspaces_to_save=None):
        """
        Save the given workspaces, which must be present in the ADS, to the directory that was
        passed at object creation time. Each workspace name saved is also appended to the
        output_list private instance variable on the WorkspaceSaver class.
        :param workspaces_to_save: List of Strings; The workspaces that are to be saved to the project.
        """

        # Handle the case where nothing was passed
        if workspaces_to_save is None:
            return

        for workspace_name in workspaces_to_save:
            # Get the workspace from the ADS
            workspace = ADS.retrieve(workspace_name)
            place_to_save_workspace = os.path.join(self.directory, workspace_name)

            from mantid.simpleapi import SaveMD, SaveNexusProcessed

            try:
                if isinstance(workspace, MDHistoWorkspace) or isinstance(workspace, IMDEventWorkspace):
                    # Save normally using SaveMD
                    SaveMD(InputWorkspace=workspace_name, Filename=place_to_save_workspace + ".nxs")
                else:
                    # Save normally using SaveNexusProcessed
                    SaveNexusProcessed(InputWorkspace=workspace_name, Filename=place_to_save_workspace + ".nxs")
            except Exception:
                logger.warning("Couldn't save workspace in project: " + workspace_name)

            self.output_list.append(workspace_name)
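A hypothetical usage sketch for the class above; the WorkspaceSaver import path and constructor arguments are assumptions based on the docstring, and CreateSampleWorkspace only provides demo data in the ADS:

from mantid.simpleapi import CreateSampleWorkspace
from mantidqt.project.workspacesaver import WorkspaceSaver  # assumed import path

CreateSampleWorkspace(OutputWorkspace='demo_ws')  # register a workspace in the ADS
saver = WorkspaceSaver(directory='/tmp/my_project')  # assumed constructor signature
saver.save_workspaces(workspaces_to_save=['demo_ws'])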
Example #6
def save_mantid_nexus(workspace_name, file_name, title=''):
    """
    save workspace to NeXus for Mantid to import
    :param workspace_name:
    :param file_name:
    :param title:
    :return:
    """
    # check input
    checkdatatypes.check_file_name(file_name,
                                   check_exist=False,
                                   check_writable=True,
                                   is_dir=False)
    checkdatatypes.check_string_variable('Workspace title', title)

    # check workspace
    checkdatatypes.check_string_variable('Workspace name', workspace_name)
    if mtd.doesExist(workspace_name):
        SaveNexusProcessed(InputWorkspace=workspace_name,
                           Filename=file_name,
                           Title=title)
    else:
        raise RuntimeError(
            'Workspace {0} does not exist in Analysis data service. Available '
            'workspaces are {1}.'
            ''.format(workspace_name, mtd.getObjectNames()))
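A small usage sketch for save_mantid_nexus, with CreateSampleWorkspace providing demo data in the ADS:

from mantid.simpleapi import CreateSampleWorkspace

CreateSampleWorkspace(OutputWorkspace='demo_ws')
save_mantid_nexus('demo_ws', '/tmp/demo_ws.nxs', title='demo save')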
Example #7
def sumToShim(rnum, output_dir=None):
    """
    Combine both spin states into a single workspace

    Parameters
    ----------
    rnum : int
      The run number to be shimmed
    output_dir : string
      If given, the folder where the workspace should be saved

    """
    try:
        wtemp = Load(BASE.format(rnum), LoadMonitors=True)
        RebinToWorkspace('wtemp_1',
                         'wtemp_monitors_1',
                         PreserveEvents=False,
                         OutputWorkspace='wtemp_1')
        RebinToWorkspace('wtemp_2',
                         'wtemp_monitors_1',
                         PreserveEvents=False,
                         OutputWorkspace='wtemp_2')
        wtemp_1 = ConjoinWorkspaces('wtemp_monitors_1', 'wtemp_1')
        wtemp_2 = ConjoinWorkspaces('wtemp_monitors_2', 'wtemp_2')
    except Exception:
        # fall back to loading the run without separate monitor workspaces
        wtemp_monitors = Load(BASE.format(rnum))
    wtempShim = mtd['wtemp_monitors_1'] + mtd['wtemp_monitors_2']
    RenameWorkspace(wtempShim, 'LARMOR{:08d}'.format(rnum))
    if output_dir:
        SaveNexusProcessed(
            'LARMOR{:08d}'.format(rnum),
            os.path.join(output_dir, "LARMOR{:08d}-add.nxs".format(rnum)))
    RenameWorkspace('LARMOR{:08d}'.format(rnum),
                    'LARMOR{:08d}-add'.format(rnum))
Example #8
    def _save(self, runnumber, basename, outputWksp):
        if not self.getProperty("SaveData").value:
            return

        # determine where to save the data
        saveDir = self.getPropertyValue("OutputDirectory").strip()
        if len(saveDir) <= 0:
            self.log().notice('Using default save location')
            saveDir = os.path.join(self.get_IPTS_Local(runnumber), 'shared', 'data')

        self.log().notice('Writing to \'' + saveDir + '\'')

        SaveNexusProcessed(InputWorkspace=outputWksp,
                           Filename=os.path.join(saveDir, 'nexus', basename + '.nxs'))
        SaveAscii(InputWorkspace=outputWksp,
                  Filename=os.path.join(saveDir, 'd_spacing', basename + '.dat'))
        ConvertUnits(InputWorkspace=outputWksp, OutputWorkspace='WS_tof',
                     Target="TOF", AlignBins=False)

        # GSAS and FullProf require data in time-of-flight
        SaveGSS(InputWorkspace='WS_tof',
                Filename=os.path.join(saveDir, 'gsas', basename + '.gsa'),
                Format='SLOG', SplitFiles=False, Append=False, ExtendedHeader=True)
        SaveFocusedXYE(InputWorkspace='WS_tof',
                       Filename=os.path.join(
                           saveDir, 'fullprof', basename + '.dat'),
                       SplitFiles=True, Append=False)
        DeleteWorkspace(Workspace='WS_tof')
Example #9
    def _save_output(self):
        from mantid.simpleapi import SaveNexusProcessed
        workdir = config['defaultsave.directory']
        el_eq1_path = os.path.join(workdir, self._scan_ws + '_el_eq1.nxs')
        logger.information('Creating file : %s' % el_eq1_path)
        SaveNexusProcessed(InputWorkspace=self._scan_ws + '_el_eq1',
                           Filename=el_eq1_path)
        el_eq2_path = os.path.join(workdir, self._scan_ws + '_el_eq2.nxs')
        logger.information('Creating file : %s' % el_eq2_path)
        SaveNexusProcessed(InputWorkspace=self._scan_ws + '_el_eq2',
                           Filename=el_eq2_path)

        inel_eq1_path = os.path.join(workdir, self._scan_ws + '_inel_eq1.nxs')
        logger.information('Creating file : %s' % inel_eq1_path)
        SaveNexusProcessed(InputWorkspace=self._scan_ws + '_inel_eq1',
                           Filename=inel_eq1_path)
        inel_eq2_path = os.path.join(workdir, self._scan_ws + '_inel_eq2.nxs')
        logger.information('Creating file : %s' % inel_eq2_path)
        SaveNexusProcessed(InputWorkspace=self._scan_ws + '_inel_eq2',
                           Filename=inel_eq2_path)

        eisf_path = os.path.join(workdir, self._scan_ws + '_eisf.nxs')
        logger.information('Creating file : %s' % eisf_path)
        SaveNexusProcessed(InputWorkspace=self._scan_ws + '_eisf',
                           Filename=eisf_path)

        if self._msdfit:
            msd_path = os.path.join(workdir, self._scan_ws + '_msd.nxs')
            logger.information('Creating file : %s' % msd_path)
            SaveNexusProcessed(InputWorkspace=self._scan_ws + '_msd',
                               Filename=msd_path)
            msd_fit_path = os.path.join(workdir, self._scan_ws + '_msd_fit.nxs')
            logger.information('Creating file : %s' % msd_fit_path)
            SaveNexusProcessed(InputWorkspace=self._scan_ws + '_msd_fit',
                               Filename=msd_fit_path)
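The repeated save-and-log pattern above could be collapsed into a single helper; a sketch under the same naming convention (_save_suffixed is an illustrative name):

import os
from mantid import config
from mantid.kernel import logger
from mantid.simpleapi import SaveNexusProcessed


def _save_suffixed(scan_ws, suffixes):
    # save each '<scan_ws><suffix>' workspace to the default save directory
    workdir = config['defaultsave.directory']
    for suffix in suffixes:
        path = os.path.join(workdir, scan_ws + suffix + '.nxs')
        logger.information('Creating file : %s' % path)
        SaveNexusProcessed(InputWorkspace=scan_ws + suffix, Filename=path)

# e.g. _save_suffixed(self._scan_ws, ['_el_eq1', '_el_eq2', '_inel_eq1', '_inel_eq2', '_eisf'])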
Example #10
def save_reduction(workspace_names, formats, x_units='DeltaE'):
    """
    Saves the workspaces to the default save directory.

    @param workspace_names List of workspace names to save
    @param formats List of formats to save in
    @param x_units Output X units
    """
    from mantid.simpleapi import (SaveSPE, SaveNexusProcessed, SaveNXSPE,
                                  SaveAscii, Rebin, DeleteWorkspace,
                                  ConvertSpectrumAxis, SaveDaveGrp)

    for workspace_name in workspace_names:
        if 'spe' in formats:
            SaveSPE(InputWorkspace=workspace_name,
                    Filename=workspace_name + '.spe')

        if 'nxs' in formats:
            SaveNexusProcessed(InputWorkspace=workspace_name,
                               Filename=workspace_name + '.nxs')

        if 'nxspe' in formats:
            SaveNXSPE(InputWorkspace=workspace_name,
                      Filename=workspace_name + '.nxspe')

        if 'ascii' in formats:
            # Version 1 of SaveAscii produces output that works better with excel/origin
            # For some reason this has to be done with an algorithm object, using the function
            # wrapper with Version did not change the version that was run
            saveAsciiAlg = AlgorithmManager.createUnmanaged('SaveAscii', 1)
            saveAsciiAlg.initialize()
            saveAsciiAlg.setProperty('InputWorkspace', workspace_name)
            saveAsciiAlg.setProperty('Filename', workspace_name + '.dat')
            saveAsciiAlg.execute()

        if 'aclimax' in formats:
            if x_units == 'DeltaE_inWavenumber':
                bins = '24, -0.005, 4000' #cm-1
            else:
                bins = '3, -0.005, 500' #meV

            Rebin(InputWorkspace=workspace_name,
                  OutputWorkspace=workspace_name + '_aclimax_save_temp',
                  Params=bins)
            SaveAscii(InputWorkspace=workspace_name + '_aclimax_save_temp',
                      Filename=workspace_name + '_aclimax.dat',
                      Separator='Tab')
            DeleteWorkspace(Workspace=workspace_name + '_aclimax_save_temp')

        if 'davegrp' in formats:
            ConvertSpectrumAxis(InputWorkspace=workspace_name,
                                OutputWorkspace=workspace_name + '_davegrp_save_temp',
                                Target='ElasticQ',
                                EMode='Indirect')
            SaveDaveGrp(InputWorkspace=workspace_name + '_davegrp_save_temp',
                        Filename=workspace_name + '.grp')
            DeleteWorkspace(Workspace=workspace_name + '_davegrp_save_temp')
Example #11
def save_reduction(workspace_names, formats, x_units='DeltaE'):
    """
    Saves the workspaces to the default save directory.

    @param workspace_names List of workspace names to save
    @param formats List of formats to save in
    @param x_units X units
    """
    from mantid.simpleapi import (SaveSPE, SaveNexusProcessed, SaveNXSPE,
                                  SaveAscii, Rebin, DeleteWorkspace,
                                  ConvertSpectrumAxis, SaveDaveGrp)

    for workspace_name in workspace_names:
        if 'spe' in formats:
            SaveSPE(InputWorkspace=workspace_name,
                    Filename=workspace_name + '.spe')

        if 'nxs' in formats:
            SaveNexusProcessed(InputWorkspace=workspace_name,
                               Filename=workspace_name + '.nxs')

        if 'nxspe' in formats:
            SaveNXSPE(InputWorkspace=workspace_name,
                      Filename=workspace_name + '.nxspe')

        if 'ascii' in formats:

            # Changed to version 2 to enable re-loading of files into mantid
            saveAsciiAlg = AlgorithmManager.createUnmanaged('SaveAscii', 2)
            saveAsciiAlg.initialize()
            saveAsciiAlg.setProperty('InputWorkspace', workspace_name)
            saveAsciiAlg.setProperty('Filename', workspace_name + '.dat')
            saveAsciiAlg.execute()

        if 'aclimax' in formats:
            if x_units == 'DeltaE_inWavenumber':
                bins = '24, -0.005, 4000'  #cm-1
            else:
                bins = '3, -0.005, 500'  #meV

            Rebin(InputWorkspace=workspace_name,
                  OutputWorkspace=workspace_name + '_aclimax_save_temp',
                  Params=bins)
            SaveAscii(InputWorkspace=workspace_name + '_aclimax_save_temp',
                      Filename=workspace_name + '_aclimax.dat',
                      Separator='Tab')
            DeleteWorkspace(Workspace=workspace_name + '_aclimax_save_temp')

        if 'davegrp' in formats:
            ConvertSpectrumAxis(InputWorkspace=workspace_name,
                                OutputWorkspace=workspace_name +
                                '_davegrp_save_temp',
                                Target='ElasticQ',
                                EMode='Indirect')
            SaveDaveGrp(InputWorkspace=workspace_name + '_davegrp_save_temp',
                        Filename=workspace_name + '.grp')
            DeleteWorkspace(Workspace=workspace_name + '_davegrp_save_temp')
Example #12
    def runTest(self):

        PowderILLEfficiency(CalibrationRun='967076.nxs', OutputWorkspace='calib')

        SaveNexusProcessed(InputWorkspace='calib', Filename=self._m_tmp_file)

        PowderILLEfficiency(CalibrationRun='967076.nxs', CalibrationFile=self._m_tmp_file, OutputWorkspace='calib-2nd')

        for i in range(mtd['calib-2nd'].getNumberHistograms()):
            self.assertDelta(mtd['calib-2nd'].readY(i), 1., 1E-3)
Example #13
    def _save_output(self):
        """
        Save the output workspace to the user's default working directory
        """
        from IndirectCommon import getDefaultWorkingDirectory
        workdir = getDefaultWorkingDirectory()
        file_path = os.path.join(workdir, self._output_workspace + '.nxs')
        SaveNexusProcessed(InputWorkspace=self._output_workspace,
                           Filename=file_path)

        if self._verbose:
            logger.notice('Output file : ' + file_path)
Example #14
def save_reduction(workspace_names, formats, x_units='DeltaE'):
    """
    Saves the workspaces to the default save directory.

    @param workspace_names List of workspace names to save
    @param formats List of formats to save in
    @param x_units X units
    """
    from mantid.simpleapi import (SaveSPE, SaveNexusProcessed, SaveNXSPE,
                                  SaveAscii, Rebin, DeleteWorkspace,
                                  ConvertSpectrumAxis, SaveDaveGrp)

    for workspace_name in workspace_names:
        if 'spe' in formats:
            SaveSPE(InputWorkspace=workspace_name,
                    Filename=workspace_name + '.spe')

        if 'nxs' in formats:
            SaveNexusProcessed(InputWorkspace=workspace_name,
                               Filename=workspace_name + '.nxs')

        if 'nxspe' in formats:
            SaveNXSPE(InputWorkspace=workspace_name,
                      Filename=workspace_name + '.nxspe')

        if 'ascii' in formats:
            _save_ascii(workspace_name, workspace_name + ".dat")

        if 'aclimax' in formats:
            if x_units == 'DeltaE_inWavenumber':
                bins = '24, -0.005, 4000'  # cm-1
            else:
                bins = '3, -0.005, 500'  # meV

            Rebin(InputWorkspace=workspace_name,
                  OutputWorkspace=workspace_name + '_aclimax_save_temp',
                  Params=bins)
            SaveAscii(InputWorkspace=workspace_name + '_aclimax_save_temp',
                      Filename=workspace_name + '_aclimax.dat',
                      Separator='Tab')
            DeleteWorkspace(Workspace=workspace_name + '_aclimax_save_temp')

        if 'davegrp' in formats:
            ConvertSpectrumAxis(InputWorkspace=workspace_name,
                                OutputWorkspace=workspace_name +
                                '_davegrp_save_temp',
                                Target='ElasticQ',
                                EMode='Indirect')
            SaveDaveGrp(InputWorkspace=workspace_name + '_davegrp_save_temp',
                        Filename=workspace_name + '.grp')
            DeleteWorkspace(Workspace=workspace_name + '_davegrp_save_temp')
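An illustrative call for the function above; the workspace name is hypothetical, and because the filenames are built relative, Mantid resolves them against config['defaultsave.directory']:

save_reduction(['irs26176_graphite002_red'], formats=['nxs', 'ascii'])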
Example #15
def save_bank_table(data: Workspace,
                    bank_id: int,
                    database_path: str,
                    date: str,
                    table_type: str = 'calibration') -> None:
    """
    Function that saves a bank calibrated TableWorkspace into a single HDF5 file
    using corelli format and using current date:
    database_path/bank0ID/type_corelli_bank0ID_YYYYMMDD.nxs.h5
    :param data input Workspace (TableWorkspace) data for calibrated pixels
    :param bank_id bank number that is calibrated
    :param date format YYYYMMDD
    :param database_path location of the corelli database (absolute or relative)
           Example: database/corelli/ for
                    database/corelli/bank001/
                    database/corelli/bank002/
    :param table_type 'calibration', 'mask' or 'fit'
    """
    verify_date_format('save_bank_table', date)
    filename: str = filename_bank_table(bank_id, database_path, date,
                                        table_type)
    SaveNexusProcessed(data, filename)
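filename_bank_table is not shown here; an illustrative reconstruction of the layout its docstring describes, for orientation only:

import pathlib


def _example_bank_filename(bank_id, database_path, date, table_type='calibration'):
    # e.g. database/corelli/bank001/calibration_corelli_bank001_20201201.nxs.h5
    bank = 'bank' + str(bank_id).zfill(3)
    name = '{}_corelli_{}_{}.nxs.h5'.format(table_type, bank, date)
    return str(pathlib.Path(database_path) / bank / name)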
Example #16
    def __processFile(self, filename, file_prog_start,
                      determineCharacterizations,
                      createUnfocused):  # noqa: C902,C901
        # create a unique name for the workspace
        wkspname = '__' + self.__wkspNameFromFile(filename)
        wkspname += '_f%d' % self._filenames.index(
            filename)  # add file number to be unique
        unfocusname = ''
        if createUnfocused:
            unfocusname = wkspname + '_unfocused'

        # check for a cachefilename
        cachefile = self.__getCacheName(self.__wkspNameFromFile(filename))
        self.log().information('looking for cachefile "{}"'.format(cachefile))
        if not createUnfocused and self.useCaching and os.path.exists(cachefile):
            try:
                if self.__loadCacheFile(cachefile, wkspname):
                    return wkspname, ''
            except RuntimeError as e:
                # log as a warning and carry on as though the cache file didn't exist
                self.log().warning('Failed to load cache file "{}": {}'.format(
                    cachefile, e))
        else:
            self.log().information('not using cache')

        chunks = determineChunking(filename, self.chunkSize)
        numSteps = 6  # for better progress reporting - 6 steps per chunk
        if createUnfocused:
            numSteps = 7  # one more for accumulating the unfocused workspace
        self.log().information('Processing \'{}\' in {:d} chunks'.format(
            filename, len(chunks)))
        prog_per_chunk_step = self.prog_per_file * 1. / (numSteps *
                                                         float(len(chunks)))

        unfocusname_chunk = ''
        canSkipLoadingLogs = False

        # inner loop is over chunks
        haveAccumulationForFile = False
        for (j, chunk) in enumerate(chunks):
            prog_start = file_prog_start + float(j) * float(
                numSteps - 1) * prog_per_chunk_step

            # if reading all at once, put the data into the final name directly
            if len(chunks) == 1:
                chunkname = wkspname
                unfocusname_chunk = unfocusname
            else:
                chunkname = '{}_c{:d}'.format(wkspname, j)
                if unfocusname:  # only create unfocus chunk if needed
                    unfocusname_chunk = '{}_c{:d}'.format(unfocusname, j)

            # load a chunk - this is a bit crazy long because we need to get an output property from `Load` when it
            # is run and the algorithm history doesn't exist until the parent algorithm (this) has finished
            loader = self.__createLoader(
                filename,
                chunkname,
                skipLoadingLogs=(len(chunks) > 1 and canSkipLoadingLogs
                                 and haveAccumulationForFile),
                progstart=prog_start,
                progstop=prog_start + prog_per_chunk_step,
                **chunk)
            loader.execute()
            if j == 0:
                self.__setupCalibration(chunkname)

            # copy the necessary logs onto the workspace
            if len(chunks) > 1 and canSkipLoadingLogs and haveAccumulationForFile:
                CopyLogs(InputWorkspace=wkspname,
                         OutputWorkspace=chunkname,
                         MergeStrategy='WipeExisting')
                # re-load instrument so detector positions that depend on logs get initialized
                try:
                    LoadIDFFromNexus(Workspace=chunkname,
                                     Filename=filename,
                                     InstrumentParentPath='/entry')
                except RuntimeError as e:
                    self.log().warning(
                        'Reloading instrument using "LoadIDFFromNexus" failed: {}'
                        .format(e))

            # get the underlying loader name if we used the generic one
            if self.__loaderName == 'Load':
                self.__loaderName = loader.getPropertyValue('LoaderName')
            # only LoadEventNexus can turn off loading logs, but FilterBadPulses
            # requires them to be loaded from the file
            canSkipLoadingLogs = self.__loaderName == 'LoadEventNexus' and self.filterBadPulses <= 0. and haveAccumulationForFile

            if determineCharacterizations and j == 0:
                self.__determineCharacterizations(
                    filename, chunkname)  # updates instance variable
                determineCharacterizations = False

            if self.__loaderName == 'LoadEventNexus' and mtd[chunkname].getNumberEvents() == 0:
                self.log().notice(
                    'Chunk {} of {} contained no events. Skipping to next chunk.'
                    .format(j + 1, len(chunks)))
                continue

            prog_start += prog_per_chunk_step
            if self.filterBadPulses > 0.:
                FilterBadPulses(InputWorkspace=chunkname,
                                OutputWorkspace=chunkname,
                                LowerCutoff=self.filterBadPulses,
                                startProgress=prog_start,
                                endProgress=prog_start + prog_per_chunk_step)
                if mtd[chunkname].getNumberEvents() == 0:
                    msg = 'FilterBadPulses removed all events from '
                    if len(chunks) == 1:
                        raise RuntimeError(msg + filename)
                    else:
                        raise RuntimeError(msg + 'chunk {} of {} in {}'.format(
                            j, len(chunks), filename))

            prog_start += prog_per_chunk_step

            # absorption correction workspace
            if self.absorption is not None and len(str(self.absorption)) > 0:
                ConvertUnits(InputWorkspace=chunkname,
                             OutputWorkspace=chunkname,
                             Target='Wavelength',
                             EMode='Elastic')
                # rebin the absorption correction to match the binning of the inputs if in histogram mode
                # EventWorkspace will compare the wavelength of each individual event
                absWksp = self.absorption
                if mtd[chunkname].id() != 'EventWorkspace':
                    absWksp = '__absWkspRebinned'
                    RebinToWorkspace(WorkspaceToRebin=self.absorption,
                                     WorkspaceToMatch=chunkname,
                                     OutputWorkspace=absWksp)
                Divide(LHSWorkspace=chunkname,
                       RHSWorkspace=absWksp,
                       OutputWorkspace=chunkname,
                       startProgress=prog_start,
                       endProgress=prog_start + prog_per_chunk_step)
                if absWksp != self.absorption:  # clean up
                    DeleteWorkspace(Workspace=absWksp)
                ConvertUnits(InputWorkspace=chunkname,
                             OutputWorkspace=chunkname,
                             Target='TOF',
                             EMode='Elastic')
            prog_start += prog_per_chunk_step

            if self.kwargs is None:
                raise RuntimeError(
                    'Somehow arguments for "AlignAndFocusPowder" aren\'t set')

            AlignAndFocusPowder(InputWorkspace=chunkname,
                                OutputWorkspace=chunkname,
                                UnfocussedWorkspace=unfocusname_chunk,
                                startProgress=prog_start,
                                endProgress=prog_start +
                                2. * prog_per_chunk_step,
                                **self.kwargs)
            prog_start += 2. * prog_per_chunk_step  # AlignAndFocusPowder counts for two steps

            self.__accumulate(chunkname,
                              wkspname,
                              unfocusname_chunk,
                              unfocusname,
                              not haveAccumulationForFile,
                              removelogs=canSkipLoadingLogs)

            haveAccumulationForFile = True
        # end of inner loop
        if not mtd.doesExist(wkspname):
            raise RuntimeError(
                'Failed to process any data from file "{}"'.format(filename))

        # copy the sample object from the absorption workspace
        if self.absorption is not None and len(str(self.absorption)) > 0:
            CopySample(InputWorkspace=self.absorption,
                       OutputWorkspace=wkspname,
                       CopyEnvironment=False)

        # write out the cachefile for the main reduced data independent of whether
        # the unfocussed workspace was requested
        if self.useCaching and not os.path.exists(cachefile):
            self.log().information(
                'Saving data to cachefile "{}"'.format(cachefile))
            SaveNexusProcessed(InputWorkspace=wkspname, Filename=cachefile)

        return wkspname, unfocusname
Example #17
    def PyExec(self):
        filenames = self._getLinearizedFilenames('Filename')
        self.filterBadPulses = self.getProperty('FilterBadPulses').value
        self.chunkSize = self.getProperty('MaxChunkSize').value
        self.absorption = self.getProperty('AbsorptionWorkspace').value
        self.charac = self.getProperty('Characterizations').value
        finalname = self.getProperty('OutputWorkspace').valueAsStr

        self.prog_per_file = 1. / float(
            len(filenames))  # for better progress reporting

        # these are also passed into the child-algorithms
        self.kwargs = self.__getAlignAndFocusArgs()

        # outer loop creates chunks to load
        for (i, filename) in enumerate(filenames):
            # default name is based off of filename
            wkspname = os.path.split(filename)[-1].split('.')[0]
            self.__determineCharacterizations(
                filename, wkspname)  # updates instance variable
            cachefile = self.__getCacheName(wkspname)
            wkspname += '_f%d' % i  # add file number to be unique

            if cachefile is not None and os.path.exists(cachefile):
                LoadNexusProcessed(Filename=cachefile,
                                   OutputWorkspace=wkspname)
                # TODO LoadNexusProcessed has a bug. When it finds the
                # instrument name without xml it reads in from an IDF
                # in the instrument directory.
                editinstrargs = {}
                for name in PROPS_FOR_INSTR:
                    prop = self.getProperty(name)
                    if not prop.isDefault:
                        editinstrargs[name] = prop.value
                if editinstrargs:
                    EditInstrumentGeometry(Workspace=wkspname, **editinstrargs)
            else:
                self.__processFile(filename, wkspname,
                                   self.prog_per_file * float(i))
                if cachefile is not None:
                    SaveNexusProcessed(InputWorkspace=wkspname,
                                       Filename=cachefile)

            # accumulate runs
            if i == 0:
                if wkspname != finalname:
                    RenameWorkspace(InputWorkspace=wkspname,
                                    OutputWorkspace=finalname)
            else:
                Plus(LHSWorkspace=finalname,
                     RHSWorkspace=wkspname,
                     OutputWorkspace=finalname,
                     ClearRHSWorkspace=self.kwargs['PreserveEvents'])
                DeleteWorkspace(Workspace=wkspname)
                if self.kwargs['PreserveEvents']:
                    CompressEvents(InputWorkspace=finalname,
                                   OutputWorkspace=finalname)

        # with more than one chunk or file the integrated proton charge is
        # generically wrong
        mtd[finalname].run().integrateProtonCharge()

        # set the output workspace
        self.setProperty('OutputWorkspace', mtd[finalname])
Example #18
    def validate(self):
        self.disableChecking.append('Instrument')
        from mantid.simpleapi import SaveNexusProcessed
        SaveNexusProcessed("992_rear_1DPhi-45.0_45.0", '/tmp/bad.nxs')
        return "992_rear_1DPhi-45.0_45.0", "SANS_TOML_Wav_Loops_ref.nxs"
Example #19
def reduce_to_2theta(hb2b_builder,
                     pixel_matrix,
                     hb2b_data_ws_name,
                     counts_array,
                     mask_vec,
                     mask_ws_name,
                     num_bins=1000):
    """
    Reduce to 2theta with Masks
    :param hb2b_builder:
    :param pixel_matrix:
    :param hb2b_data_ws_name:
    :param counts_array:
    :param mask_vec:
    :param num_bins:
    :return:
    """
    # convert counts to float64 for the histogramming step
    vec_counts = counts_array.astype('float64')

    # apply the mask
    if mask_vec is not None:
        mask_vec = mask_vec.astype('float64')
        vec_counts *= mask_vec
    # reduce
    bin_edges, histogram = hb2b_builder.reduce_to_2theta_histogram(
        pixel_matrix, vec_counts, num_bins)

    # create workspace
    pyrs_reduced_name = '{}_pyrs_reduced'.format(hb2b_data_ws_name)
    CreateWorkspace(DataX=bin_edges,
                    DataY=histogram,
                    NSpec=1,
                    OutputWorkspace=pyrs_reduced_name)
    SaveNexusProcessed(InputWorkspace=pyrs_reduced_name,
                       Filename='{}.nxs'.format(pyrs_reduced_name),
                       Title='PyRS reduced: {}'.format(hb2b_data_ws_name))

    if True:
        # Mantid
        # transfer to 2theta for data
        two_theta_ws_name = '{}_2theta'.format(hb2b_data_ws_name)

        # Mask
        if mask_ws_name:
            # Multiply by masking workspace
            masked_ws_name = '{}_masked'.format(hb2b_data_ws_name)
            Multiply(LHSWorkspace=hb2b_data_ws_name,
                     RHSWorkspace=mask_ws_name,
                     OutputWorkspace=masked_ws_name,
                     ClearRHSWorkspace=False)
            hb2b_data_ws_name = masked_ws_name
            SaveNexusProcessed(InputWorkspace=hb2b_data_ws_name,
                               Filename='{}_raw.nxs'.format(hb2b_data_ws_name))
        # END-IF

        # # this is for test only!
        # ConvertSpectrumAxis(InputWorkspace=hb2b_data_ws_name, OutputWorkspace=two_theta_ws_name, Target='Theta',
        #                     OrderAxis=False)
        # Transpose(InputWorkspace=two_theta_ws_name, OutputWorkspace=two_theta_ws_name)
        # two_theta_ws = mtd[two_theta_ws_name]
        # for i in range(10):
        #     print ('{}: x = {}, y = {}'.format(i, two_theta_ws.readX(0)[i], two_theta_ws.readY(0)[i]))
        # for i in range(10010, 10020):
        #     print ('{}: x = {}, y = {}'.format(i, two_theta_ws.readX(0)[i], two_theta_ws.readY(0)[i]))

        ConvertSpectrumAxis(InputWorkspace=hb2b_data_ws_name,
                            OutputWorkspace=two_theta_ws_name,
                            Target='Theta')
        Transpose(InputWorkspace=two_theta_ws_name,
                  OutputWorkspace=two_theta_ws_name)
        # final:
        mantid_reduced_name = '{}_mtd_reduced'.format(hb2b_data_ws_name)
        ResampleX(InputWorkspace=two_theta_ws_name,
                  OutputWorkspace=mantid_reduced_name,
                  NumberBins=num_bins,
                  PreserveEvents=False)
        mantid_ws = mtd[mantid_reduced_name]

        SaveNexusProcessed(
            InputWorkspace=mantid_reduced_name,
            Filename='{}.nxs'.format(mantid_reduced_name),
            Title='Mantid reduced: {}'.format(hb2b_data_ws_name))

        plt.plot(mantid_ws.readX(0),
                 mantid_ws.readY(0),
                 color='blue',
                 marker='o')

    # END-IF

    plt.plot(bin_edges[:-1], histogram, color='red')

    plt.show()

    return
Example #20
def save_banks(InputWorkspace,
               Filename,
               Title,
               OutputDir,
               Binning=None,
               GroupingWorkspace=None):
    """
    Saves input workspace to processed NeXus file in specified
    output directory with optional rebinning and grouping
    (to coarsen) the output in a bank-by-bank manner. Mainly
    wraps Mantid `SaveNexusProcessed` algorithm.

    :param InputWorkspace: Mantid workspace to save out
    :type InputWorkspace: MatrixWorkspace
    :param Filename: Filename to save output
    :type Filename: str
    :param Title: A title to describe the saved workspace
    :type Title: str
    :param OutputDir: Output directory to save the processed NeXus file
    :type OutputDir: path str
    :param Binning: Optional rebinning of event workspace.
                    See `Rebin` in Mantid for options
    :type Binning: dbl list
    :param GroupingWorkspace: A workspace with grouping
                              information for the output spectra
    :type GroupingWorkspace: GroupingWorkspace
    """

    # Make a local clone
    CloneWorkspace(InputWorkspace=InputWorkspace, OutputWorkspace="__tmp")
    tmp_wksp = mtd["__tmp"]

    # Rebin if requested
    if Binning:
        tmp_wksp = Rebin(InputWorkspace=tmp_wksp,
                         Params=Binning,
                         PreserveEvents=True)

    # Convert to distributions to remove bin width dependence
    yunit = tmp_wksp.YUnit()
    if yunit == "Counts":
        try:
            ConvertToDistribution(tmp_wksp)
        except BaseException:
            pass

    # Output to desired level of grouping
    isEventWksp = isinstance(tmp_wksp, IEventWorkspace)
    if isEventWksp and GroupingWorkspace and yunit == "Counts":
        tmp_wksp = DiffractionFocussing(InputWorkspace=tmp_wksp,
                                        GroupingWorkspace=GroupingWorkspace,
                                        PreserveEvents=False)

    # Save out wksp to file
    filename = os.path.join(os.path.abspath(OutputDir), Filename)
    SaveNexusProcessed(InputWorkspace=tmp_wksp,
                       Filename=filename,
                       Title=Title,
                       Append=True,
                       PreserveEvents=False,
                       WorkspaceIndexList=range(
                           tmp_wksp.getNumberHistograms()))
    DeleteWorkspace(tmp_wksp)
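A hedged usage sketch for save_banks; CreateSampleWorkspace stands in for real reduced data, and the optional Binning and GroupingWorkspace arguments are omitted:

from mantid.simpleapi import CreateSampleWorkspace

CreateSampleWorkspace(OutputWorkspace='banks_demo')
save_banks(InputWorkspace='banks_demo',
           Filename='banks_demo.nxs',
           Title='demo banks',
           OutputDir='/tmp')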
Example #21
def add_runs(
        runs,  # noqa: C901
        inst='sans2d',
        defType='.nxs',
        rawTypes=('.raw', '.s*', 'add', '.RAW'),
        lowMem=False,
        binning='Monitors',
        saveAsEvent=False,
        isOverlay=False,
        time_shifts=None,
        outFile=None,
        outFile_monitors=None,
        save_directory=None,
        estimate_logs=False):
    if inst.upper() == "SANS2DTUBES":
        inst = "SANS2D"

    # Check if there is at least one file in the list
    if len(runs) < 1:
        return

    if not defType.startswith('.'):
        defType = '.' + defType

    # Create the correct format of adding files
    if time_shifts is None:
        time_shifts = []
    adder = AddOperation(isOverlay, time_shifts)

    # These input arguments need to be arrays of strings, enforce this
    if isinstance(runs, str):
        runs = (runs, )
    if isinstance(rawTypes, str):
        rawTypes = (rawTypes, )

    if lowMem:
        if _can_load_periods(runs, defType, rawTypes):
            period = 1
    else:
        period = _NO_INDIVIDUAL_PERIODS

    userEntry = runs[0]

    counter_run = 0

    while True:
        isFirstDataSetEvent = False
        try:
            lastPath, lastFile, logFile, num_periods, isFirstDataSetEvent = _load_ws(
                userEntry, defType, inst, ADD_FILES_SUM_TEMPORARY, rawTypes,
                period)

            is_not_allowed_instrument = inst.upper() not in {
                'SANS2D', 'LARMOR', 'ZOOM'
            }
            if is_not_allowed_instrument and isFirstDataSetEvent:
                error = 'Adding event data not supported for ' + inst + ' for now'
                print(error)
                sanslog.error(error)
                for workspaceName in (ADD_FILES_SUM_TEMPORARY,
                                      ADD_FILES_SUM_TEMPORARY_MONITORS):
                    if workspaceName in mtd:
                        DeleteWorkspace(workspaceName)
                return ""

            for i in range(len(runs) - 1):
                userEntry = runs[i + 1]
                lastPath, lastFile, logFile, dummy, is_data_set_event = _load_ws(
                    userEntry, defType, inst, ADD_FILES_NEW_TEMPORARY,
                    rawTypes, period)

                if is_data_set_event != isFirstDataSetEvent:
                    error = 'Datasets added must be either ALL histogram data or ALL event data'
                    print(error)
                    sanslog.error(error)
                    for workspaceName in (ADD_FILES_SUM_TEMPORARY,
                                          ADD_FILES_SUM_TEMPORARY_MONITORS,
                                          ADD_FILES_NEW_TEMPORARY,
                                          ADD_FILES_NEW_TEMPORARY_MONITORS):
                        if workspaceName in mtd:
                            DeleteWorkspace(workspaceName)
                    return ""

                adder.add(LHS_workspace=ADD_FILES_SUM_TEMPORARY,
                          RHS_workspace=ADD_FILES_NEW_TEMPORARY,
                          output_workspace=ADD_FILES_SUM_TEMPORARY,
                          run_to_add=counter_run,
                          estimate_logs=estimate_logs)

                if isFirstDataSetEvent:
                    adder.add(
                        LHS_workspace=ADD_FILES_SUM_TEMPORARY_MONITORS,
                        RHS_workspace=ADD_FILES_NEW_TEMPORARY_MONITORS,
                        output_workspace=ADD_FILES_SUM_TEMPORARY_MONITORS,
                        run_to_add=counter_run,
                        estimate_logs=estimate_logs)
                DeleteWorkspace(ADD_FILES_NEW_TEMPORARY)
                if isFirstDataSetEvent:
                    DeleteWorkspace(ADD_FILES_NEW_TEMPORARY_MONITORS)
                # Increment the run number
                counter_run += 1
        except ValueError as e:
            error = "Error opening file {}: {}".format(userEntry, str(e))
            print(error)
            sanslog.error(error)
            if ADD_FILES_SUM_TEMPORARY in mtd:
                DeleteWorkspace(ADD_FILES_SUM_TEMPORARY)
            return ""
        except Exception as e:
            # We need to catch all exceptions to ensure that a dialog box is raised with the error
            error = "Error finding files: {}".format(str(e))
            print(error)
            sanslog.error(error)
            for workspaceName in (ADD_FILES_SUM_TEMPORARY,
                                  ADD_FILES_NEW_TEMPORARY):
                if workspaceName in mtd:
                    DeleteWorkspace(workspaceName)
            return ""

        # In case of event file force it into a histogram workspace if this is requested
        if isFirstDataSetEvent and not saveAsEvent:
            handle_saving_event_workspace_when_saving_as_histogram(
                binning, runs, defType, inst)

        lastFile = os.path.splitext(lastFile)[0]
        # Now save the added file
        outFile = lastFile + '-add.' + 'nxs' if outFile is None else outFile
        outFile_monitors = lastFile + '-add_monitors.' + 'nxs' if outFile_monitors is None else outFile_monitors
        if save_directory is not None:
            # In ISIS SANS gui, an output directory can be specified.
            # If one has, add it to filepath here
            outFile = save_directory + outFile
            outFile_monitors = save_directory + outFile_monitors
        sanslog.notice("Writing file: {}".format(outFile))

        if period == 1 or period == _NO_INDIVIDUAL_PERIODS:
            # Replace the file the first time around
            SaveNexusProcessed(InputWorkspace=ADD_FILES_SUM_TEMPORARY,
                               Filename=outFile,
                               Append=False)
            # If we are saving event data, then we need to save also the monitor file
            if isFirstDataSetEvent and saveAsEvent:
                SaveNexusProcessed(
                    InputWorkspace=ADD_FILES_SUM_TEMPORARY_MONITORS,
                    Filename=outFile_monitors,
                    Append=False)

        else:
            # Then append
            SaveNexusProcessed(ADD_FILES_SUM_TEMPORARY, outFile, Append=True)
            if isFirstDataSetEvent and saveAsEvent:
                SaveNexusProcessed(ADD_FILES_SUM_TEMPORARY_MONITORS,
                                   outFile_monitors,
                                   Append=True)

        DeleteWorkspace(ADD_FILES_SUM_TEMPORARY)
        if isFirstDataSetEvent:
            DeleteWorkspace(ADD_FILES_SUM_TEMPORARY_MONITORS)

        if period == num_periods:
            break

        if period == _NO_INDIVIDUAL_PERIODS:
            break
        else:
            period += 1

    if isFirstDataSetEvent and saveAsEvent:
        filename, ext = _make_filename(runs[0], defType, inst)
        workspace_type = get_workspace_type(filename)
        is_multi_period = True if workspace_type is WorkspaceType.MultiperiodEvent else False
        outFile = bundle_added_event_data_as_group(outFile, outFile_monitors,
                                                   is_multi_period)

    # This adds the path to the filename
    path, base = os.path.split(outFile)
    if path == '' or base not in os.listdir(path):
        # Try the default save directory
        path_prefix = save_directory if save_directory else config["defaultsave.directory"]
        path = path_prefix + path
        # If the path is still an empty string check in the current working directory
        if path == '':
            path = os.getcwd()
        assert base in os.listdir(path)
    path_out = path
    if logFile:
        _copy_log(lastPath, logFile, path_out)

    return 'The following file has been created:\n' + outFile
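An illustrative call for add_runs; the run numbers are hypothetical and the remaining keyword arguments keep their defaults:

add_runs(('5723', '5724'), inst='SANS2D', defType='.nxs')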
Example #22
def export_workspaces(ws_name_list):
    for ws_name in ws_name_list:
        SaveNexusProcessed(InputWorkspace=ws_name, Filename='/tmp/{}.nxs'.format(ws_name))
Example #23
    def PyExec(self):
        filenames = self._getLinearizedFilenames('Filename')
        self.filterBadPulses = self.getProperty('FilterBadPulses').value
        self.chunkSize = self.getProperty('MaxChunkSize').value
        self.absorption = self.getProperty('AbsorptionWorkspace').value
        self.charac = self.getProperty('Characterizations').value
        finalname = self.getPropertyValue('OutputWorkspace')
        useCaching = len(self.getProperty('CacheDir').value) > 0

        # accumulate the unfocused workspace if it was requested
        # empty string means it is not used
        unfocusname = self.getPropertyValue('UnfocussedWorkspace')
        unfocusname_file = ''
        if len(unfocusname) > 0:
            unfocusname_file = '__{}_partial'.format(unfocusname)

        if useCaching:
            # unfocus check only matters if caching is requested
            if unfocusname != '':
                self.log().warning(
                    'CacheDir is specified with "UnfocussedWorkspace" - reading cache files disabled'
                )
        else:
            self.log().warning(
                'CacheDir is not specified - functionality disabled')

        self.prog_per_file = 1. / float(
            len(filenames))  # for better progress reporting

        # these are also passed into the child-algorithms
        self.kwargs = self.__getAlignAndFocusArgs()

        # outer loop creates chunks to load
        for (i, filename) in enumerate(filenames):
            # default name is based off of filename
            wkspname = os.path.split(filename)[-1].split('.')[0]

            if useCaching:
                self.__determineCharacterizations(
                    filename, wkspname, True)  # updates instance variable
                cachefile = self.__getCacheName(wkspname)
            else:
                cachefile = None

            wkspname += '_f%d' % i  # add file number to be unique

            # if the unfocussed data is requested, don't read it from disk
            # because of the extra complication of the unfocussed workspace
            if useCaching and os.path.exists(cachefile) and unfocusname == '':
                LoadNexusProcessed(Filename=cachefile,
                                   OutputWorkspace=wkspname)
                # TODO LoadNexusProcessed has a bug. When it finds the
                # instrument name without xml it reads in from an IDF
                # in the instrument directory.
                editinstrargs = {}
                for name in PROPS_FOR_INSTR:
                    prop = self.getProperty(name)
                    if not prop.isDefault:
                        editinstrargs[name] = prop.value
                if editinstrargs:
                    EditInstrumentGeometry(Workspace=wkspname, **editinstrargs)
            else:
                self.__processFile(filename, wkspname, unfocusname_file,
                                   self.prog_per_file * float(i),
                                   not useCaching)

                # write out the cachefile for the main reduced data independent of whether
                # the unfocussed workspace was requested
                if useCaching:
                    SaveNexusProcessed(InputWorkspace=wkspname,
                                       Filename=cachefile)

            # accumulate runs
            if i == 0:
                if wkspname != finalname:
                    RenameWorkspace(InputWorkspace=wkspname,
                                    OutputWorkspace=finalname)
                if unfocusname != '':
                    RenameWorkspace(InputWorkspace=unfocusname_file,
                                    OutputWorkspace=unfocusname)
            else:
                Plus(LHSWorkspace=finalname,
                     RHSWorkspace=wkspname,
                     OutputWorkspace=finalname,
                     ClearRHSWorkspace=self.kwargs['PreserveEvents'])
                DeleteWorkspace(Workspace=wkspname)

                if unfocusname != '':
                    Plus(LHSWorkspace=unfocusname,
                         RHSWorkspace=unfocusname_file,
                         OutputWorkspace=unfocusname,
                         ClearRHSWorkspace=self.kwargs['PreserveEvents'])
                    DeleteWorkspace(Workspace=unfocusname_file)

                if self.kwargs['PreserveEvents']:
                    CompressEvents(InputWorkspace=finalname,
                                   OutputWorkspace=finalname)
                    # not compressing unfocussed workspace because it is in d-spacing
                    # and is likely to be from a different part of the instrument

        # with more than one chunk or file the integrated proton charge is
        # generically wrong
        mtd[finalname].run().integrateProtonCharge()

        # set the output workspace
        self.setProperty('OutputWorkspace', mtd[finalname])
        if unfocusname != '':
            self.setProperty('UnfocussedWorkspace', mtd[unfocusname])
Example #24
def save_workspace(workspace_name, filename):
    SaveNexusProcessed(InputWorkspace=workspace_name,
                       Filename=filename,
                       EnableLogging=False)
Example #25
    def __saveSummedGroupToCache(self, group, wkspname):
        cache_file = self.__getGroupCacheName(group)
        if not os.path.exists(cache_file):
            SaveNexusProcessed(InputWorkspace=wkspname, Filename=cache_file)
        return
Example #26
def new_corelli_calibration(
        database_path: str,
        date: Optional[str] = None) -> Tuple[str, str, str]:
    r"""
    Generate a Corelli calibration set of files for a given day stamp, or for today if no day stamp is given.

    For each bank, this function will retrieve the calibration with an anterior date as close as possible
    to the given day stamp. The day stamp for the generated calibration will be the most modern day stamp among
    the day stamps of all banks.

    The files to be produced are:
    - database_path/calibration_corelli_YYYYMMDD.nxs.h5
    - database_path/mask_corelli_YYYYMMDD.nxs.h5
    - database_path/manifest_corelli_YYYYMMDD.nxs.h5

    Example: Assume today's date is 20201201 and we have a database with calibrations for two
    different days. Furthermore our instrument has only one bank, for simplicity.
    database_path/
    |_bank001/
      |_calibration_corelli_bank001_20200101.nxs.h5  (calibration in January)
      |_calibration_corelli_bank001_20200601.nxs.h5  (calibration in June)
      |_mask_corelli_bank001_20200101.nxs.h5
      |_mask_corelli_bank001_20200601.nxs.h5
    Invoking today new_corelli_calibration(database_path) will create the following files:
    - database_path/calibration_corelli_20200601.nxs.h5
    - database_path/mask_corelli_20200601.nxs.h5
    - database_path/manifest_corelli_20200601.nxs.h5
    Notice the date of the files (20200601) is not today's date (20201201) but the most "modern" day-stamp in
    the database.
    Invoking today new_corelli_calibration(database_path, date=20200301) will create the following files:
    - database_path/calibration_corelli_20200101.nxs.h5
    - database_path/mask_corelli_20200101.nxs.h5
    - database_path/manifest_corelli_20200101.nxs.h5
    The January files (20200101) are selected because we requested an instrument calibration
    with a date (20200301) prior to the June files.

    :param database_path: absolute path to the database containing the bank calibrations
    :param date: day stamp in format YYYYMMDD

    :return: absolute path to files containing the calibrated pixels, the masked pixels,
        and the manifest file, in this order.
    """
    if date is None:
        date = datetime.now().strftime(
            '%Y%m%d')  # today's date in YYYYMMDD format
    verify_date_format('new_corelli_calibration', date)

    file_paths = dict()
    for table_type in ('calibration', 'mask'):
        logger.notice(
            f'** Gathering {table_type} tables from individual banks')
        bank_tables, bank_stamps = combine_temporal_banks(
            database_path, date, table_type)
        if len(bank_stamps) == 0:
            logger.warning(
                f'No bank {table_type} files found with date < {date}')
            continue
        logger.notice(
            f'** Combining {table_type} tables from individual banks')
        table = combine_spatial_banks(bank_tables, table_type=table_type)

        bank_numbers, day_stamps = zip(*bank_stamps)
        last_day_stamp = sorted(day_stamps)[-1]
        filename = str(
            pathlib.Path(database_path) /
            f'{table_type}_corelli_{last_day_stamp}.nxs.h5')
        logger.notice(f'** Saving instrument {table_type} to the database')
        SaveNexusProcessed(InputWorkspace=table, Filename=filename)
        file_paths[table_type] = filename

        if table_type == 'calibration':
            logger.notice('** Creating and saving the manifest file')
            file_paths['manifest'] = save_manifest_file(
                database_path,
                bank_numbers,
                day_stamps,
                manifest_day_stamp=last_day_stamp)

    return tuple(file_paths[x] for x in ('calibration', 'mask', 'manifest'))
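An illustrative invocation; the database path is hypothetical and must already contain per-bank calibration files laid out as in the docstring:

cal_file, mask_file, manifest_file = new_corelli_calibration(
    '/tmp/corelli_database', date='20200301')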
Example #27
    def inner(*args, **kwargs):
        """
        How caching name works
        """
        # unpack key arguments
        wksp_name = args[0]
        abs_method = args[1]
        cache_dirs = kwargs.get("cache_dirs", [])
        prefix_name = kwargs.get("prefix_name", "")

        # return early if no cache_dirs were specified
        if len(cache_dirs) == 0:
            return func(*args, **kwargs)

        # step_1: generate the SHA1 and cache file name
        #         based on the given kwargs
        cache_prefix = __get_instrument_name(wksp_name)

        cache_filenames, ascii_hash = __get_cache_name(
            wksp_name,
            abs_method,
            cache_dirs=cache_dirs,
            prefix_name=cache_prefix)

        # step_2: try load the cached data from disk
        found_sample, found_container, abs_wksp_sample, abs_wksp_container, cache_filename = __load_cached_data(
            cache_filenames,
            ascii_hash,
            abs_method=abs_method,
            prefix_name=prefix_name,
        )

        # step_3: calculation
        if (abs_method == "SampleOnly") and found_sample:
            # Chen: why is this blowing things up?
            return abs_wksp_sample, ""
        else:
            if found_sample and found_container:
                # cache is available in memory now, skip calculation
                return abs_wksp_sample, abs_wksp_container
            else:
                # no cache found, need calculation
                log = Logger('calc_absorption_corr_using_wksp')
                if cache_filename:
                    log.information(f"Storing cached data in {cache_filename}")

                abs_wksp_sample, abs_wksp_container = func(*args, **kwargs)

                # set SHA1 to workspace
                mtd[abs_wksp_sample].mutableRun()["absSHA1"] = ascii_hash
                if abs_wksp_container != "":
                    mtd[abs_wksp_container].mutableRun(
                    )["absSHA1"] = ascii_hash

                # save to disk
                SaveNexusProcessed(InputWorkspace=abs_wksp_sample,
                                   Filename=cache_filename)
                if abs_wksp_container != "":
                    SaveNexusProcessed(InputWorkspace=abs_wksp_container,
                                       Filename=cache_filename,
                                       Append=True)

                return abs_wksp_sample, abs_wksp_container
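__get_cache_name is not shown; a minimal sketch of the idea it implements (hashing the calculation inputs into a stable cache filename), with all names hypothetical:

import hashlib
import os


def _example_cache_name(cache_dir, prefix, **calc_kwargs):
    # build a stable string from the inputs and hash it, so identical
    # inputs always map to the same cache file on disk
    key = ','.join('{}={}'.format(k, calc_kwargs[k]) for k in sorted(calc_kwargs))
    ascii_hash = hashlib.sha1(key.encode('utf-8')).hexdigest()
    return os.path.join(cache_dir, '{}_{}.nxs'.format(prefix, ascii_hash)), ascii_hash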
Example #28
def reduce(input_file, output_dir):
    ws = Load(input_file)
    file_name = os.path.join(output_dir, 'load-successful.nxs')
    SaveNexusProcessed(InputWorkspace=ws, Filename=file_name)