Beispiel #1
0
def _add_and_group_caad(fit_namer, vesuvio_input, suffices,
                        normalised_workspaces, summed_workspaces, indices):
    """
    Add the normalised and CAAD-summed workspaces to the ADS under
    suffix-derived names, copy the sample-data spectra axis onto each
    normalised workspace, and group both sets of workspaces.

    :param fit_namer: provides the normalised/CAAD group-name prefixes
    :param vesuvio_input: provides sample_data whose spectra axis is copied
    :param suffices: name suffixes, looked up by entries of indices
    :param normalised_workspaces: workspaces for the normalised group
    :param summed_workspaces: workspaces for the CAAD group
    :param indices: indices into suffices, one per workspace
    """
    normalised_names = [
        fit_namer.normalised_group_name + "_" + suffices[index]
        for index in indices
    ]
    summed_names = [
        fit_namer.caad_group_name + "_" + suffices[index] for index in indices
    ]
    name_count = dict()

    for name, workspace in zip(normalised_names + summed_names,
                               normalised_workspaces + summed_workspaces):
        if name not in name_count:
            mtd.addOrReplace(name, workspace)
            name_count[name] = 1
        else:
            # Disambiguate duplicate names by appending an occurrence count
            # (second occurrence gets suffix "2", third "3", ...).
            name_count[name] += 1
            mtd.addOrReplace(name + str(name_count[name]), workspace)

    # BUG FIX: map() is lazy in Python 3 and its result was discarded, so the
    # axis copier never actually ran.  Use an explicit loop for the side effect.
    copy_spectra_axis = _create_spectra_axis_copier(vesuvio_input.sample_data)
    for workspace in normalised_workspaces:
        copy_spectra_axis(workspace)

    GroupWorkspaces(InputWorkspaces=normalised_workspaces,
                    OutputWorkspace=fit_namer.normalised_group_name)
    GroupWorkspaces(InputWorkspaces=summed_workspaces,
                    OutputWorkspace=fit_namer.caad_group_name)
def GenerateEventsFilterFromFiles(filenames, OutputWorkspace,
                                  InformationWorkspace, **kwargs):
    """
    Run GenerateEventsFilter on each input file and gather the splitter and
    information workspaces into two workspace groups.

    :param filenames: list of file names to load and filter
    :param OutputWorkspace: name for the group of splitter workspaces
    :param InformationWorkspace: name for the group of information workspaces
    :param kwargs: optional LogName, MinimumLogValue, MaximumLogValue,
        LogValueInterval and UnitOfTime (default 'Nanoseconds'), forwarded
        to GenerateEventsFilter
    :raises Exception: if more than one file is supplied (not yet supported)
    """
    logName = kwargs.get('LogName', None)
    minValue = kwargs.get('MinimumLogValue', None)
    maxValue = kwargs.get('MaximumLogValue', None)
    logInterval = kwargs.get('LogValueInterval', None)
    unitOfTime = kwargs.get('UnitOfTime', 'Nanoseconds')

    # TODO - handle multi-file filtering. Delete this check once implemented.
    # BUG FIX: the original condition was `len(filenames) == 1`, which rejected
    # the only supported case (a single file) and let the unsupported
    # multi-file case fall through to the loop below.
    if len(filenames) > 1:
        error = 'Multi-file filtering is not yet supported. (Stay tuned...)'
        raise Exception(error)

    for i, filename in enumerate(filenames):
        Load(Filename=filename, OutputWorkspace=filename)
        splitws, infows = GenerateEventsFilter(InputWorkspace=filename,
                                               UnitOfTime=unitOfTime,
                                               LogName=logName,
                                               MinimumLogValue=minValue,
                                               MaximumLogValue=maxValue,
                                               LogValueInterval=logInterval)
        if i == 0:
            # First file establishes the two output groups.
            GroupWorkspaces(splitws, OutputWorkspace=OutputWorkspace)
            GroupWorkspaces(infows, OutputWorkspace=InformationWorkspace)
        else:
            # Subsequent files are appended to the existing groups.
            mtd[OutputWorkspace].add(splitws)
            mtd[InformationWorkspace].add(infows)
    return
Beispiel #3
0
    def test_DNSVanadiumCorrection_Groups(self):
        """Run DNSComputeDetEffCorrCoefs on grouped spin-flip/non-spin-flip
        vanadium and background inputs and check execution, group size,
        dimensions and the computed coefficient values."""
        outputWorkspaceName = "DNSComputeDetCorrCoefsTest_Test1"
        dataY = np.linspace(2, 13.5, 24)
        # second vanadium pair (spin-flip / non-spin-flip) at the same angle
        sfvana2 = create_fake_dns_workspace('sfvana2',
                                            dataY=dataY,
                                            flipper='ON',
                                            angle=-8.54,
                                            loadinstrument=True)
        nsfvana2 = create_fake_dns_workspace('nsfvana2',
                                             dataY=dataY,
                                             flipper='OFF',
                                             angle=-8.54,
                                             loadinstrument=True)
        vanagroupsf = GroupWorkspaces([self.sfvanaws, sfvana2])
        vanagroupnsf = GroupWorkspaces([self.nsfvanaws, nsfvana2])

        # flat background of 1.5 per bin; fill() mutates dataY in place
        dataY.fill(1.5)
        sfbg2 = create_fake_dns_workspace('sfbg2',
                                          dataY=dataY,
                                          flipper='ON',
                                          angle=-8.54,
                                          loadinstrument=True)
        nsfbg2 = create_fake_dns_workspace('nsfbg2',
                                           dataY=dataY,
                                           flipper='OFF',
                                           angle=-8.54,
                                           loadinstrument=True)
        bggroupsf = GroupWorkspaces([self.sfbkgrws, sfbg2])
        bggroupnsf = GroupWorkspaces([self.nsfbkgrws, nsfbg2])
        alg_test = run_algorithm(
            "DNSComputeDetEffCorrCoefs",
            VanadiumWorkspaces=[vanagroupsf, vanagroupnsf],
            BackgroundWorkspaces=[bggroupsf, bggroupnsf],
            OutputWorkspace=outputWorkspaceName)

        self.assertTrue(alg_test.isExecuted())
        # check whether the data are correct
        group = AnalysisDataService.retrieve(outputWorkspaceName)
        self.assertTrue(isinstance(group, WorkspaceGroup))
        self.assertEqual(2, group.getNumberOfEntries())
        res1 = group.getItem(0)
        res2 = group.getItem(1)
        # dimensions
        self.assertEqual(24, res1.getNumberHistograms())
        self.assertEqual(24, res2.getNumberHistograms())
        self.assertEqual(2, res1.getNumDims())
        self.assertEqual(2, res2.getNumDims())
        # reference data
        refdata = np.linspace(0.08, 1.92, 24)
        # data array
        for i in range(24):
            self.assertAlmostEqual(refdata[i], res1.readY(i)[0])
            self.assertAlmostEqual(refdata[i], res2.readY(i)[0])
        # clean up the ADS workspaces created by this test
        wslist = [
            outputWorkspaceName, 'sfvana2', 'nsfvana2', 'sfbg2', 'nsfbg2'
        ]
        for wsname in wslist:
            run_algorithm("DeleteWorkspace", Workspace=wsname)
        return
Beispiel #4
0
 def create_log_workspace_group(self):
     """Create the 'logs' WorkspaceGroup containing the run-information
     table plus one table per log name configured in the settings."""
     # run information table
     run_info = self.make_runinfo_table()
     self._log_workspaces = GroupWorkspaces([run_info], OutputWorkspace='logs')
     # a table per logs
     logs = get_setting(output_settings.INTERFACES_SETTINGS_GROUP, output_settings.ENGINEERING_PREFIX, "logs")
     if logs:
         self._log_names = logs.split(',')
         for log in self._log_names:
             self.make_log_table(log)
             # add by name; presumably make_log_table put a workspace named
             # `log` in the ADS - TODO confirm against make_log_table
             self._log_workspaces.add(log)
Beispiel #5
0
    def test_when_nested_workspaces_are_being_saved_from_the_ADS(self, logger):
        """Saving a group that contains another group must leave the ADS
        untouched and log a warning, because SaveNexusProcessed cannot
        write nested groups of groups."""
        CreateSampleWorkspace(OutputWorkspace="ws1")
        CreateSampleWorkspace(OutputWorkspace="ws2")
        CreateSampleWorkspace(OutputWorkspace="ws3")
        CreateSampleWorkspace(OutputWorkspace="ws4")
        GroupWorkspaces(InputWorkspaces="ws1,ws2", OutputWorkspace="group1")
        GroupWorkspaces(InputWorkspaces="ws4,ws3", OutputWorkspace="group2")
        # nest group1 inside group2
        ADS.addToGroup("group2", "group1")
        ws_saver = workspacesaver.WorkspaceSaver(self.working_directory)

        ws_saver.save_workspaces(["group2"])

        # everything remains in the ADS and the failure was logged
        self.assertListEqual(["group1", "group2", "ws1", "ws2", "ws3", "ws4"], ADS.getObjectNames())
        logger.warning.assert_called_with(u'Couldn\'t save workspace in project: "group2" because SaveNexusProcessed: '
                                          u'NeXus files do not support nested groups of groups')
Beispiel #6
0
def calibrate_banks(workspace: WorkspaceTypes, bank_selection: str,
                    calibration_group: str = 'calibrations',
                    mask_group: str = 'masks',
                    fit_group: str = 'fits',
                    **kwargs) -> Tuple[WorkspaceGroup, Optional[WorkspaceGroup]]:
    r"""
    Calibrate the tubes of each selected bank and collect the per-bank results
    into workspace groups.

    For every bank N in the selection, `calibrate_bank` produces a calibration
    table 'calibN', an optional mask table 'maskN' (only when some tubes failed
    to calibrate) and a fit-results workspace 'fitN'.  Those are gathered into
    the `calibration_group`, `mask_group` and `fit_group` WorkspaceGroups.

    Temporary workspaces named 'CalibTable', 'PeakTable', 'parameters_table'
    and 'acceptance' are produced along the way, overwriting any workspaces
    that already carry those names.

    :param workspace: input Workspace2D with total neutron counts per pixel
    :param bank_selection: selection string, e.g. '32-36,38,40-43'
    :param calibration_group: output group name for the calibration tables
    :param mask_group: output group name for the mask tables
    :param fit_group: output group name for the fit-results workspaces
    :param kwargs: forwarded to `calibrate_bank`

    :return: the calibrations group and the masks group (None when no bank
        produced a mask)
    """
    calibrations = []
    masks = []
    fits = []
    # Run the per-bank calibration, collecting each bank's outputs.
    for number in bank_numbers(bank_selection):
        calibration, mask = calibrate_bank(workspace, 'bank' + number,
                                           'calib' + number, 'mask' + number,
                                           fit_results='fit' + number, **kwargs)
        fits.append(mtd['fit' + number])
        calibrations.append(calibration)
        if mask is not None:
            masks.append(mask)
    # Gather the collected workspaces into their output groups.
    GroupWorkspaces(InputWorkspaces=calibrations, OutputWorkspace=calibration_group)
    GroupWorkspaces(InputWorkspaces=fits, OutputWorkspace=fit_group)
    if masks:
        GroupWorkspaces(InputWorkspaces=masks, OutputWorkspace=mask_group)

    return mtd[calibration_group], mtd[mask_group] if masks else None
Beispiel #7
0
    def runTest(self):
        """Reduce three sample sets (with containers and transmissions)
        via SANSILLAutoProcess and group the I(q) outputs as 'out'."""

        beams = '2866,2867+2868,2878'
        containers = '2888+2971,2884+2960,2880+2949'
        container_tr = '2870+2954'
        beam_tr = '2867+2868'
        samples = ['2889,2885,2881', '2887,2883,2879', '3187,3177,3167']
        sample_tr = ['2871', '2869', '3172']
        thick = [0.1, 0.2, 0.2]

        # reduce samples
        # this also tests that already loaded workspace can be passed instead of a file
        LoadNexusProcessed(Filename='sens-lamp.nxs',
                           OutputWorkspace='sens-lamp')
        for i in range(len(samples)):
            SANSILLAutoProcess(SampleRuns=samples[i],
                               BeamRuns=beams,
                               ContainerRuns=containers,
                               MaskFiles='mask1.nxs,mask2.nxs,mask3.nxs',
                               SensitivityMaps='sens-lamp',
                               SampleTransmissionRuns=sample_tr[i],
                               ContainerTransmissionRuns=container_tr,
                               TransmissionBeamRuns=beam_tr,
                               SampleThickness=thick[i],
                               CalculateResolution='MildnerCarpenter',
                               OutputWorkspace='iq_s' + str(i + 1),
                               BeamRadius='0.05,0.05,0.05',
                               TransmissionBeamRadius=0.05,
                               StitchReferenceIndex=1)

        GroupWorkspaces(InputWorkspaces=['iq_s1', 'iq_s2', 'iq_s3'],
                        OutputWorkspace='out')
    def runTest(self):
        """Same three-sample reduction, but loading the sensitivity map
        directly from file and without beam-radius/stitch overrides."""

        beams = '2866,2867+2868,2878'
        containers = '2888+2971,2884+2960,2880+2949'
        container_tr = '2870+2954'
        beam_tr = '2867+2868'
        samples = ['2889,2885,2881', '2887,2883,2879', '3187,3177,3167']
        sample_tr = ['2871', '2869', '3172']
        thick = [0.1, 0.2, 0.2]

        # reduce samples
        for i in range(len(samples)):
            SANSILLAutoProcess(SampleRuns=samples[i],
                               BeamRuns=beams,
                               ContainerRuns=containers,
                               MaskFiles='mask1.nxs,mask2.nxs,mask3.nxs',
                               SensitivityMaps='sens-lamp.nxs',
                               SampleTransmissionRuns=sample_tr[i],
                               ContainerTransmissionRuns=container_tr,
                               TransmissionBeamRuns=beam_tr,
                               SampleThickness=thick[i],
                               CalculateResolution='MildnerCarpenter',
                               OutputWorkspace='iq_s' + str(i + 1))

        GroupWorkspaces(InputWorkspaces=['iq_s1', 'iq_s2', 'iq_s3'],
                        OutputWorkspace='out')
    def runTest(self):
        """Single multi-distance sample reduction with two wedges; groups
        the per-distance and per-wedge I(q) outputs as 'out'."""

        beams = '2866,2867+2868,2878'
        containers = '2888+2971,2884+2960,2880+2949'
        container_tr = '2870+2954'
        beam_tr = '2867+2868'
        sample = '3187,3177,3167'
        sample_tr = '2869'
        thick = 0.2

        # reduce samples
        # this also tests that already loaded workspace can be passed instead of a file
        LoadNexusProcessed(Filename='sens-lamp.nxs',
                           OutputWorkspace='sens-lamp')
        SANSILLAutoProcess(SampleRuns=sample,
                           BeamRuns=beams,
                           ContainerRuns=containers,
                           MaskFiles='mask1.nxs,mask2.nxs,mask3.nxs',
                           SensitivityMaps='sens-lamp',
                           SampleTransmissionRuns=sample_tr,
                           ContainerTransmissionRuns=container_tr,
                           TransmissionBeamRuns=beam_tr,
                           SampleThickness=thick,
                           CalculateResolution='MildnerCarpenter',
                           NumberOfWedges=2,
                           OutputWorkspace='iq')

        GroupWorkspaces(InputWorkspaces=[
            'iq_1', 'iq_2', 'iq_3', 'iq_wedge_1_1', 'iq_wedge_1_2',
            'iq_wedge_1_3', 'iq_wedge_2_1', 'iq_wedge_2_2', 'iq_wedge_2_3'
        ],
                        OutputWorkspace='out')
    def runTest(self):
        """D33 I(Phi,Q) reduction with panel outputs; groups the result and
        its panels workspace as 'out'."""

        absorber = '002227'
        tr_beam = '002192'
        can_tr = '002193'
        empty_beam = '002219'
        can = '002228'
        mask = 'D33Mask2.nxs'

        SANSILLAutoProcess(
            SampleRuns='001464',
            SampleTransmissionRuns='002197',
            MaskFiles=mask,
            AbsorberRuns=absorber,
            BeamRuns=empty_beam,
            ContainerRuns=can,
            ContainerTransmissionRuns=can_tr,
            TransmissionBeamRuns=tr_beam,
            OutputWorkspace='iphiq',
            OutputPanels=True,
            NumberOfWedges=60,
            OutputType='I(Phi,Q)',
            BeamRadius=0.05,
            TransmissionBeamRadius=0.05
        )

        GroupWorkspaces(InputWorkspaces=['iphiq', 'iphiq_panels'],
                        OutputWorkspace='out')
    def runTest(self):
        """Run PowderDiffILLDetEffCorr and group the calibration and
        response outputs as 'group'."""

        PowderDiffILLDetEffCorr(CalibrationRun='967076.nxs',
                                OutputWorkspace='calib',
                                OutputResponseWorkspace='response')
        GroupWorkspaces(InputWorkspaces=['calib', 'response'],
                        OutputWorkspace='group')
Beispiel #12
0
 def create_fit_tables(self):
     """Build one workspace per fit parameter (one spectrum per run, one
     point per repeated function in the model) plus a 'model' summary
     table, and group them all under '<logs-prefix>_fits'."""
     wslist = []  # ws to be grouped
     # extract fit parameters and errors
     nruns = len(
         self.get_loaded_ws_list())  # num of rows of output workspace
     # get unique set of function parameters across all workspaces
     func_params = set(
         chain(*[
             list(d['results'].keys()) for d in self._fit_results.values()
         ]))
     for param in func_params:
         # get max number of repeated func in a model (num columns of output workspace)
         nfuncs = max([
             len(d['results'][param]) for d in self._fit_results.values()
             if param in d['results']
         ])
         # make output workspace
         ipeak = list(range(1, nfuncs + 1)) * nruns
         ws = CreateWorkspace(OutputWorkspace=param,
                              DataX=ipeak,
                              DataY=ipeak,
                              NSpec=nruns)
         # axis for labels in workspace
         axis = TextAxis.create(nruns)
         for iws, wsname in enumerate(self.get_active_ws_name_list()):
             if wsname in self._fit_results and param in self._fit_results[
                     wsname]['results']:
                 fitvals = array(
                     self._fit_results[wsname]['results'][param])
                 # pad with NaN rows up to nfuncs so every spectrum has
                 # equal length
                 data = vstack(
                     (fitvals, full((nfuncs - fitvals.shape[0], 2), nan)))
             else:
                 # no result for this run/param: all-NaN placeholder
                 data = full((nfuncs, 2), nan)
             ws.setY(iws, data[:, 0])
             ws.setE(iws, data[:, 1])
             # label row
             axis.setLabel(iws, wsname)
         ws.replaceAxis(1, axis)
         wslist += [ws]
     # table for model summary/info
     model = CreateEmptyTableWorkspace(OutputWorkspace='model')
     model.addColumn(type="str", name="Workspace")
     model.addColumn(type="float", name="chisq/DOF"
                     )  # always is for LM minimiser (users can't change)
     model.addColumn(type="str", name="status")
     model.addColumn(type="str", name="Model")
     for iws, wsname in enumerate(self.get_active_ws_name_list()):
         if wsname in self._fit_results:
             row = [
                 wsname, self._fit_results[wsname]['costFunction'],
                 self._fit_results[wsname]['status'],
                 self._fit_results[wsname]['model']
             ]
             self.write_table_row(model, row, iws)
         else:
             # blank row keeps table rows aligned with the run list
             self.write_table_row(model, ['', nan, ''], iws)
     wslist += [model]
     group_name = self._log_workspaces.name().split('_log')[0] + '_fits'
     self._fit_workspaces = GroupWorkspaces(wslist,
                                            OutputWorkspace=group_name)
    def runTest(self):
        """Run PowderILLEfficiency with angle interpolation enabled and
        group the calibration and response outputs as 'group'."""

        PowderILLEfficiency(CalibrationRun='167339',
                            OutputWorkspace='calib',
                            InterpolateOverlappingAngles=True,
                            OutputResponseWorkspace='response')
        GroupWorkspaces(InputWorkspaces=['calib','response'], OutputWorkspace='group')
    def runTest(self):
        """Run PowderILLEfficiency and group the calibration and response
        outputs as 'group'."""

        PowderILLEfficiency(CalibrationRun='967076.nxs',
                            OutputWorkspace='calib',
                            OutputResponseWorkspace='response')
        GroupWorkspaces(InputWorkspaces=['calib', 'response'],
                        OutputWorkspace='group')
Beispiel #15
0
 def _create_ads_sample_workspaces(self):
     """Create two sample workspaces under random (uuid) names, recording
     the names in self._ads_names, and return them as a randomly-named
     WorkspaceGroup."""
     ws_ptrs = []
     for i in range(2):
         self._ads_names.append(str(uuid.uuid4()))
         ws_ptrs.append(CreateSampleWorkspace(OutputWorkspace=self._ads_names[-1]))
     ws_group = GroupWorkspaces(InputWorkspaces=ws_ptrs, OutputWorkspace=str(uuid.uuid4()))
     return ws_group
Beispiel #16
0
    def runTest(self):
        """Multi-distance reduction with two wedges; groups the
        distance-decorated outputs (e.g. 'iq_#1_d39.0m_c40.5m_w5.6A')
        as 'out'."""

        beams = '2866,2867+2868,2878'
        containers = '2888+2971,2884+2960,2880+2949'
        container_tr = '2870+2954'
        beam_tr = '2867+2868'
        sample = '3187,3177,3167'
        sample_tr = '2869'
        thick = 0.2

        SANSILLAutoProcess(SampleRuns=sample,
                           BeamRuns=beams,
                           ContainerRuns=containers,
                           SensitivityMaps='sens-lamp.nxs',
                           MaskFiles='mask1.nxs,mask2.nxs,mask3.nxs',
                           SampleTransmissionRuns=sample_tr,
                           ContainerTransmissionRuns=container_tr,
                           TransmissionBeamRuns=beam_tr,
                           SampleThickness=thick,
                           CalculateResolution='MildnerCarpenter',
                           NumberOfWedges=2,
                           OutputWorkspace='iq',
                           BeamRadius='0.05,0.05,0.05',
                           TransmissionBeamRadius=0.05)

        GroupWorkspaces(InputWorkspaces=[
            'iq_#1_d39.0m_c40.5m_w5.6A', 'iq_#2_d8.0m_c8.0m_w5.6A',
            'iq_#3_d2.0m_c5.5m_w5.6A', 'iq_wedge_1_#1_d39.0m_c40.5m_w5.6A',
            'iq_wedge_1_#2_d8.0m_c8.0m_w5.6A',
            'iq_wedge_1_#3_d2.0m_c5.5m_w5.6A',
            'iq_wedge_2_#1_d39.0m_c40.5m_w5.6A',
            'iq_wedge_2_#2_d8.0m_c8.0m_w5.6A',
            'iq_wedge_2_#3_d2.0m_c5.5m_w5.6A'
        ],
                        OutputWorkspace='out')
Beispiel #17
0
 def runTest(self):
     """
     Override parent method, does the work of running the test.

     Loads the two BASIS S(q,w) files, groups them, and runs
     ElasticWindowMultiple on the group; cleanup runs in `finally`.
     """
     try:
         # Load files and create workspace group
         names = ('BASIS_63652_sqw', 'BASIS_63700_sqw')
         # IDIOM FIX: the original built a throwaway list comprehension
         # purely for Load's side effect; a plain loop states the intent.
         for name in names:
             Load(Filename=name + '.nxs', OutputWorkspace=name)
         GroupWorkspaces(InputWorkspaces=names,
                         OutputWorkspace='elwin_input')
         ElasticWindowMultiple(InputWorkspaces='elwin_input',
                               IntegrationRangeStart=-0.0035,
                               IntegrationRangeEnd=0.0035,
                               BackgroundRangeStart=-0.1,
                               BackgroundRangeEnd=-0.05,
                               SampleEnvironmentLogName='SensorA',
                               SampleEnvironmentLogValue='average',
                               OutputInQ='outQ',
                               OutputInQSquared='outQ2',
                               OutputELF='ELF',
                               OutputELT='ELT')
     finally:
         self.preptear()
Beispiel #18
0
    def runTest(self):
        """D33 I(q) reduction with per-panel outputs; groups the main and
        panel workspaces as 'out'."""

        absorber = '002227'
        tr_beam = '002192'
        can_tr = '002193'
        empty_beam = '002219'
        can = '002228'
        mask = 'D33Mask2.nxs'

        SANSILLAutoProcess(SampleRuns='001464',
                           SampleTransmissionRuns='002197',
                           MaskFiles=mask,
                           AbsorberRuns=absorber,
                           BeamRuns=empty_beam,
                           ContainerRuns=can,
                           ContainerTransmissionRuns=can_tr,
                           TransmissionBeamRuns=tr_beam,
                           OutputWorkspace='iq',
                           OutputPanels=True,
                           BeamRadius=0.05,
                           TransmissionBeamRadius=0.05)

        GroupWorkspaces(InputWorkspaces=[
            'iq', 'iq_back_detector', 'iq_front_detector_bottom',
            'iq_front_detector_left', 'iq_front_detector_right',
            'iq_front_detector_top'
        ],
                        OutputWorkspace='out')
 def loaded_workspace_as_group(self, run):
     """Return the loaded data for *run*: multi-period data is grouped
     into a temporary '__temp_group' WorkspaceGroup, single-period data
     returns its single workspace directly."""
     if self.is_multi_period():
         workspace_list = [wrapper._workspace_name for wrapper in self._loaded_data.get_data(
             run=run, instrument=self.instrument)['workspace']['OutputWorkspace']]
         return GroupWorkspaces(InputWorkspaces=workspace_list, OutputWorkspace='__temp_group')
     else:
         return self._loaded_data.get_data(run=run, instrument=self.instrument)['workspace']['OutputWorkspace'][0].workspace
Beispiel #20
0
 def test_inputgroup(self):
     """TOFTOFCropWorkspace must accept a WorkspaceGroup as its input."""
     group = GroupWorkspaces([self._input_ws])
     OutputWorkspaceName = "cropped_ws"
     alg_test = run_algorithm("TOFTOFCropWorkspace",
                              InputWorkspace=group,
                              OutputWorkspace=OutputWorkspaceName)
     self.assertTrue(alg_test.isExecuted())
 def test_no_TOF_input_workspace_groups_remain_unchanged(self):
     """A group of non-TOF (wavelength) workspaces must pass through the
     algorithm unchanged."""
     self._create_workspace_wavelength(12345)
     self._create_workspace_wavelength(67890)
     GroupWorkspaces('12345, 67890', OutputWorkspace='no_TOF_group')
     args = self._default_options
     args['InputRunList'] = '12345, 67890'
     outputs = ['no_TOF_group', 'TOF_12345+67890', '12345', '67890']
     self._assert_run_algorithm_succeeds(args, outputs)
 def test_fails_with_mixed_unit_input_workspace_group(self):
     """An input group mixing TOF and wavelength workspaces must be
     rejected."""
     self._create_workspace(13460, 'TOF_')
     self._create_workspace(13463, 'TOF_')
     self._create_workspace_wavelength(12345)
     GroupWorkspaces('TOF_13463, 12345', OutputWorkspace='mixed_unit_group')
     args = self._default_options
     args['InputRunList'] = '13460, mixed_unit_group'
     self._assert_run_algorithm_throws(args)
 def test_groups_same_logs(self):
     """CompareSampleLogs over a group with matching logs must return an
     empty result string (no mismatches)."""
     GroupWorkspaces([self.ws1, self.ws2], OutputWorkspace='group')
     lognames = 'wavelength,polarisation'
     alg_test = run_algorithm("CompareSampleLogs", InputWorkspaces='group', SampleLogs=lognames,
                              Tolerance=0.01)
     self.assertTrue(alg_test.isExecuted())
     result = alg_test.getProperty('Result').value
     self.assertEqual('', result)
Beispiel #24
0
 def create_log_table(self):
     """Create the 'logs' WorkspaceGroup with a run-information table plus
     one avg/stdev table per log name configured in the settings."""
     # run information table
     run_info = CreateEmptyTableWorkspace()
     run_info.addColumn(type="str", name="Instrument")
     run_info.addColumn(type="int", name="Run")
     run_info.addColumn(type="int", name="Bank")
     run_info.addColumn(type="float", name="uAmps")
     run_info.addColumn(type="str", name="Title")
     self._log_workspaces = GroupWorkspaces([run_info], OutputWorkspace='logs')
     # a table per logs
     logs = get_setting(path_handling.INTERFACES_SETTINGS_GROUP, path_handling.ENGINEERING_PREFIX, "logs")
     if logs:
         for log in logs.split(','):
             ws = CreateEmptyTableWorkspace(OutputWorkspace=log)
             ws.addColumn(type="float", name="avg")
             ws.addColumn(type="float", name="stdev")
             # add by name: the table was placed in the ADS under `log`
             self._log_workspaces.add(log)
 def _rename_and_group_workspaces(index, output_workspaces):
     """Clone every output workspace under an index-suffixed name and
     gather the clones into an 'Iteration_<index>' workspace group."""
     renamed = []
     for source in output_workspaces:
         target = '{}_{}'.format(source, index)
         CloneWorkspace(InputWorkspace=source,
                        OutputWorkspace=target)
         renamed.append(target)
     GroupWorkspaces(InputWorkspaces=renamed,
                     OutputWorkspace='Iteration_{}'.format(index))
Beispiel #26
0
def _group_corrections(vesuvio_output, sample_runs, iteration):
    """Add the correction and corrected workspaces to the ADS under a
    run/iteration prefix and, when any exist, group them under that prefix.

    Returns the created WorkspaceGroup, or None when there is nothing to group.
    """
    prefix = sample_runs + "_iteration_" + str(iteration)
    corrections = _add_corrections_to_ads(vesuvio_output.correction_workspaces, prefix + "_correction_")
    corrected = _add_corrections_to_ads(vesuvio_output.corrected_workspaces, prefix + "_corrected_")

    # Nothing was added -> nothing to group.
    if not corrections and not corrected:
        return None
    return GroupWorkspaces(InputWorkspaces=corrections + corrected, OutputWorkspace=prefix)
Beispiel #27
0
def _add_fit_output_to_ads(vesuvio_output, fit_namer):
    if vesuvio_output.fit_output_workspaces:
        return GroupWorkspaces(InputWorkspaces=[
            workspace.getName()
            for workspace in vesuvio_output.fit_output_workspaces
        ],
                               OutputWorkspace=fit_namer.fit_output_group_name)
    return None
Beispiel #28
0
    def PyExec(self):
        """Load each requested function's data file from the directory,
        optionally convolute it with the TOSCA resolution, create one
        workspace per function, and group them into the output workspace."""
        self.data_directory = self.getPropertyValue('Directory')
        # Converts the specified functions into full filenames and finds them in directory
        data_files = [
            os.path.splitext(f)[0]
            for f in fnmatch.filter(os.listdir(self.data_directory), '*.dat')
        ]
        logger.debug('All data files: {0}'.format(data_files))
        chosen_functions = [x for x in self.getProperty('Functions').value]
        func_names = [
            f for f in data_files if f.replace(',', '') in chosen_functions
        ]
        prog_reporter = Progress(self,
                                 start=0.0,
                                 end=1.0,
                                 nreports=len(func_names))
        logger.debug('Functions to load: {0}'.format(func_names))
        loaded_function_workspaces = []
        out_ws_name = self.getPropertyValue('OutputWorkspace')

        for function in func_names:
            prog_reporter.report('Loading {0} function'.format(function))
            # Loads the two axes
            # read_axis returns a tuple; index 3 presumably names the
            # companion axis file - TODO confirm against read_axis
            y_axis = self.read_axis(function)
            x_axis = self.read_axis(y_axis[3])

            # Converts the x-axis units and sets all axis properties
            x_axis = self.axis_conversion(x_axis[0], x_axis[1], y_axis[3])
            y_data = y_axis[0]
            y_name = y_axis[2]
            x_data = x_axis[0]
            x_unit = x_axis[1]
            x_name = x_axis[2]

            # Convolutes the data if required
            if self.getPropertyValue('ResolutionConvolution'
                                     ) == 'TOSCA' and x_name == 'frequency':
                resolutions = self.gaussians(x_data, self.TOSCA_resfunction)
                y_data = self.convolutor(y_data, resolutions, x_data)
                logger.information('Function ' + str(y_name) +
                                   ' will be convoluted')

            # Create the workspace for function
            ws_title = out_ws_name + '(' + function + ')'
            CreateWorkspace(OutputWorkspace=ws_title,
                            DataX=x_data,
                            DataY=y_data,
                            UnitX=x_unit,
                            WorkspaceTitle=ws_title)

            loaded_function_workspaces.append(ws_title)

        if len(loaded_function_workspaces) == 0:
            raise RuntimeError('Failed to load any functions for data')
        GroupWorkspaces(InputWorkspaces=loaded_function_workspaces,
                        OutputWorkspace=out_ws_name)
        # Set the output workspace
        self.setProperty('OutputWorkspace', out_ws_name)
    def test_empty_group_workspaces(self):
        """_empty_group_workspace must report True after every member has
        been removed from the group."""
        CreateSampleWorkspace(OutputWorkspace="ws")
        CreateSampleWorkspace(OutputWorkspace="ws1")
        GroupWorkspaces(OutputWorkspace="Group", InputWorkspaces="ws,ws1")
        group_workspace = ADS.retrieve("Group")
        group_workspace.remove("ws")
        group_workspace.remove("ws1")

        self.assertTrue(self.pr_saver._empty_group_workspace(group_workspace))
Beispiel #30
0
def _create_group_test_workspace(output_name):
    """Create seven single test workspaces with FWHM values 15.0 .. 30.0
    (step 2.5) and group them under *output_name*."""
    fwhm_values = [15.0 + 2.5 * step for step in range(7)]
    names = []
    for index, fwhm in enumerate(fwhm_values):
        name = "workspace_" + str(index)
        _create_single_test_workspace(fwhm, name, index)
        names.append(name)

    GroupWorkspaces(InputWorkspaces=names,
                    OutputWorkspace=output_name)