def setUp(self):
    """Load the reduced container and sample workspaces used by the tests."""
    # Container run (irs26173) and sample run (irs26176), both already reduced.
    self._can_ws = LoadNexusProcessed(Filename='irs26173_graphite002_red.nxs')
    self._red_ws = LoadNexusProcessed(Filename='irs26176_graphite002_red.nxs')
def test_save_banks_check_contents(self):
    """Round-trip the workspace through save_banks and verify it is unchanged."""
    save_banks(self.wksp, self.out_nxs, 'wksp', '.')
    reloaded = LoadNexusProcessed(self.out_nxs)
    # Binning and spectrum count must survive the save/load round trip
    self.assertEqual(reloaded.blocksize(), self.wksp.blocksize())
    self.assertEqual(reloaded.getNumberHistograms(), self.wksp.getNumberHistograms())
    # The X axis must be reproduced exactly, value for value
    reloaded_x = reloaded.getAxis(0).extractValues()
    original_x = self.wksp.getAxis(0).extractValues()
    self.assertTrue(np.array_equal(reloaded_x, original_x))
def load_banks(run: Union[int, str], bank_selection: str, output_workspace: str) -> Workspace2D:
    r"""
    Load events only for the selected banks, and don't load metadata.

    If the file is not an events file, but a Nexus processed file, the bank_selection is ignored.

    :param run: run-number or filename to an Event nexus file or a processed nexus file
    :param bank_selection: selection string, such as '10,12-15,17-21'
    :param output_workspace: name of the output workspace containing counts per pixel
    :raises AssertionError: when ``run`` is a path that does not exist
    :return: workspace containing counts per pixel. Events in each pixel are integrated
        into neutron counts.
    """
    # Resolve the input run into a descriptor the loaders can open
    if isinstance(run, int):
        file_descriptor = f'CORELLI_{run}'
    else:  # a run number given as a string, or the path to a file
        try:
            file_descriptor = f'CORELLI_{str(int(run))}'
        except ValueError:  # run is path to a file
            filename = run
            # BUGFIX: the f-string previously had no placeholder, so the error always
            # read "File (unknown) does not exist"; interpolate the actual path.
            assert path.exists(filename), f'File {filename} does not exist'
            file_descriptor = filename
    bank_names = ','.join(['bank' + b for b in bank_numbers(bank_selection)])
    # Try the event loader first; fall back to a processed-nexus load, in which
    # case the bank selection cannot be honoured (see docstring).
    try:
        LoadEventNexus(Filename=file_descriptor, OutputWorkspace=output_workspace,
                       BankName=bank_names, LoadMonitors=False, LoadLogs=True)
    except (RuntimeError, ValueError):
        LoadNexusProcessed(Filename=file_descriptor, OutputWorkspace=output_workspace)
    # Collapse the TOF axis so each pixel holds its integrated neutron count
    Integration(InputWorkspace=output_workspace, OutputWorkspace=output_workspace)
    return mtd[output_workspace]
def runTest(self):
    """Reduce one sample over three configurations with wedges, and group the outputs."""
    # Run-number lists: ',' separates the three instrument configurations,
    # '+' merges runs belonging to the same configuration
    beams = '2866,2867+2868,2878'
    containers = '2888+2971,2884+2960,2880+2949'
    container_tr = '2870+2954'
    beam_tr = '2867+2868'
    sample = '3187,3177,3167'
    sample_tr = '2869'
    thick = 0.2  # sample thickness -- presumably cm, TODO confirm against SANSILLAutoProcess docs
    # reduce samples
    # this also tests that already loaded workspace can be passed instead of a file
    LoadNexusProcessed(Filename='sens-lamp.nxs', OutputWorkspace='sens-lamp')
    SANSILLAutoProcess(SampleRuns=sample,
                       BeamRuns=beams,
                       ContainerRuns=containers,
                       MaskFiles='mask1.nxs,mask2.nxs,mask3.nxs',
                       SensitivityMaps='sens-lamp',
                       SampleTransmissionRuns=sample_tr,
                       ContainerTransmissionRuns=container_tr,
                       TransmissionBeamRuns=beam_tr,
                       SampleThickness=thick,
                       CalculateResolution='MildnerCarpenter',
                       NumberOfWedges=2,
                       OutputWorkspace='iq')
    # Collect the per-configuration I(q) curves and their wedge outputs into one group
    GroupWorkspaces(InputWorkspaces=[
        'iq_1', 'iq_2', 'iq_3',
        'iq_wedge_1_1', 'iq_wedge_1_2', 'iq_wedge_1_3',
        'iq_wedge_2_1', 'iq_wedge_2_2', 'iq_wedge_2_3'
    ], OutputWorkspace='out')
def setUpClass(cls):
    r"""Load the CORELLI bank-20 test data and build the wire-fit configuration shared by all tests."""
    cls.workspaces_temporary = list()  # workspaces to be deleted at tear-down
    # Load a CORELLI file containing data for bank number 20 (16 tubes)
    config.appendDataSearchSubDir('CORELLI/calibration')
    # Locate the unit-test data directory among the configured search paths
    for directory in config.getDataSearchDirs():
        if 'UnitTest' in directory:
            data_dir = path.join(directory, 'CORELLI', 'calibration')
            break
    workspace = 'CORELLI_123455_bank20'
    LoadNexusProcessed(Filename=path.join(data_dir, workspace + '.nxs'),
                       OutputWorkspace=workspace)
    cls.workspaces_temporary.append(workspace)  # delete workspace at tear-down
    # BUGFIX: a hard-coded 14-element list previously assigned to
    # wire_positions_pixels here was dead code -- it was immediately
    # overwritten by the call below -- so it has been removed.
    wire_positions_pixels = wire_positions(units='pixels')[1:-1]  # drop the two end positions
    wire_count = len(wire_positions_pixels)
    fit_parameters = TubeCalibFitParams(wire_positions_pixels, height=-1000, width=4, margin=7)
    fit_parameters.setAutomatic(True)
    cls.corelli = {
        'workspace': workspace,
        'bank_name': 'bank20',
        'wire_positions': wire_positions(units='meters')[1:-1],
        'peaks_form': [1] * wire_count,  # signals we'll be fitting dips (peaks with negative heights)
        'fit_parameters': fit_parameters,
    }
def setUpClass(cls): ConfigService.Instance().setString("default.facility", "ISIS") # A small workspace for general tests test_workspace = LoadNexusProcessed(Filename="LOQ48127") cls.immutable_test_workspace = test_workspace # A full workspace on which we can test region of interest selection region_of_interest_workspace = Load(Filename="LOQ74044") cls.region_of_interest_workspace = region_of_interest_workspace # A region of interest xml file roi_content = ("<?xml version=\"1.0\"?>\n" "\t<detector-masking>\n" "\t\t<group>\n" "\t\t\t<detids>6990-6996</detids>\n" "\t\t</group>\n" "\t</detector-masking>\n") cls.roi_file_path = cls._get_path(cls.roi_file) cls._save_file(cls.roi_file_path, roi_content) # A mask file mask_content = ("<?xml version=\"1.0\"?>\n" "\t<detector-masking>\n" "\t\t<group>\n" "\t\t\t<detids>6991</detids>\n" "\t\t</group>\n" "\t</detector-masking>\n") cls.mask_file_path = cls._get_path(cls.mask_file) cls._save_file(cls.mask_file_path, mask_content) ConfigService.Instance().setString("default.facility", " ")
def __loadCacheFile(self, filename, wkspname):
    '''@returns True if a file was loaded'''
    # Guard clause: nothing to do when there is no cache file on disk
    if not os.path.exists(filename):
        return False
    self.log().notice('Loading cache from {}'.format(filename))
    LoadNexusProcessed(Filename=filename, OutputWorkspace=wkspname)
    # TODO LoadNexusProcessed has a bug. When it finds the
    # instrument name without xml it reads in from an IDF
    # in the instrument directory.
    # Collect any non-default instrument-geometry overrides
    editinstrargs = {name: self.getProperty(name).value
                     for name in PROPS_FOR_INSTR
                     if not self.getProperty(name).isDefault}
    if editinstrargs:
        try:
            EditInstrumentGeometry(Workspace=wkspname, **editinstrargs)
        except RuntimeError as e:
            # treat this as a non-fatal error
            self.log().warning('Failed to update instrument geometry in cache file: {}'.format(e))
    return True
def runTest(self):
    """Reduce a multi-configuration measurement with SANSILLAutoProcess."""
    # Run-number lists: ',' separates the three instrument configurations
    beams = '1020,947,1088'
    containers = '1023,973,1003'
    container_tr = '1023,988,988'
    beam_tr = '1020,1119,1119'
    samples = '1025,975,1005'
    sample_tr = '1204,990,990'
    thick = 0.1  # sample thickness -- presumably cm, TODO confirm against SANSILLAutoProcess docs
    # reduce samples
    # this also tests that already loaded workspace can be passed instead of a file
    LoadNexusProcessed(Filename='sens-lamp.nxs', OutputWorkspace='sens-lamp')
    SANSILLAutoProcess(
        SampleRuns=samples,
        BeamRuns=beams,
        ContainerRuns=containers,
        DefaultMaskFile='edge_masks',
        MaskFiles='mask_39m_10A,mask_8m_4_6A,mask_1m_4_6A',
        SensitivityMaps='sens-lamp',
        SampleTransmissionRuns=sample_tr,
        ContainerTransmissionRuns=container_tr,
        TransmissionBeamRuns=beam_tr,
        SampleThickness=thick,
        CalculateResolution='MildnerCarpenter',
        OutputWorkspace='iq_mult_wavelengths',
        BeamRadius='0.05',
        TransmissionBeamRadius=0.05,
        StitchReferenceIndex=0
    )
def runTest(self):
    """Reduce three sample sets against shared beams/containers, then group the results."""
    # Run-number lists: ',' separates the three instrument configurations,
    # '+' merges runs belonging to the same configuration
    beams = '2866,2867+2868,2878'
    containers = '2888+2971,2884+2960,2880+2949'
    container_tr = '2870+2954'
    beam_tr = '2867+2868'
    samples = ['2889,2885,2881', '2887,2883,2879', '3187,3177,3167']
    sample_tr = ['2871', '2869', '3172']
    thick = [0.1, 0.2, 0.2]
    # reduce samples
    # this also tests that already loaded workspace can be passed instead of a file
    LoadNexusProcessed(Filename='sens-lamp.nxs', OutputWorkspace='sens-lamp')
    # One reduction per sample set, paired with its transmission run and thickness
    for index, (sample_runs, transmission, thickness) in enumerate(zip(samples, sample_tr, thick), start=1):
        SANSILLAutoProcess(SampleRuns=sample_runs,
                           BeamRuns=beams,
                           ContainerRuns=containers,
                           MaskFiles='mask1.nxs,mask2.nxs,mask3.nxs',
                           SensitivityMaps='sens-lamp',
                           SampleTransmissionRuns=transmission,
                           ContainerTransmissionRuns=container_tr,
                           TransmissionBeamRuns=beam_tr,
                           SampleThickness=thickness,
                           CalculateResolution='MildnerCarpenter',
                           OutputWorkspace='iq_s' + str(index),
                           BeamRadius='0.05,0.05,0.05',
                           TransmissionBeamRadius=0.05,
                           StitchReferenceIndex=1)
    GroupWorkspaces(InputWorkspaces=['iq_s1', 'iq_s2', 'iq_s3'], OutputWorkspace='out')
def setUpClass(cls):  # called only before running all tests in the test case
    """Build synthetic single-tube fixtures and load the CORELLI bank-20 test data."""
    cls.workspaces_temporary = list()  # workspaces to be deleted at tear-down

    # Single tube data. Tube dimensions appropriate for a CORELLI tube
    def y_quad(n: float) -> float:
        r"""
        Example quadratic function, returning the Y-coordinate (meters) versus pixel index `n`:
        y_quad(n) = c0 + c1 * n + c2 * n^2.
        Coefficients c0, c1, and c2 obtained by solving the following equations:
            y(0) = -0.502
            y(128) = 0.001
            y(255) = 0.393  # assume a tube with 256 pixels
        Obtaining:
            c0 = -0.502
            c1 = 0.00435287724834028
            c2 = -3.306169908908442e-06
        :param n: pixel coordinate
        """
        return -0.502 + 0.00435287724834028 * n - 3.306169908908442e-06 * n * n

    # assume 11 slits(wires) casting 11 peaks(shadows) onto the tube at the following pixel numbers
    tube_points = np.linspace(5, 245, 11, endpoint=True)  # 5, 29, 53,...,221, 245
    # assume the Y-coordinates of the peaks(shadows) given by our quadratic example function
    ideal_tube_coordinates = [y_quad(n) for n in tube_points]
    cls.y_quad_data = {
        'detector_count': 256,
        'peak_count': 11,
        'y_quad': y_quad,
        'coefficients': {
            'A0': -0.502,
            'A1': 0.00435287724834028,
            'A2': -3.306169908908442e-06
        },
        'tube_points': tube_points,
        'ideal_tube_coordinates': ideal_tube_coordinates
    }

    # Load a CORELLI file containing data for bank number 20 (16 tubes)
    config.appendDataSearchSubDir('CORELLI/calibration')
    # Locate the unit-test data directory among the configured search paths
    for directory in config.getDataSearchDirs():
        if 'UnitTest' in directory:
            data_dir = path.join(directory, 'CORELLI', 'calibration')
            break
    workspace = 'CORELLI_123455_bank20'
    LoadNexusProcessed(Filename=path.join(data_dir, workspace + '.nxs'),
                       OutputWorkspace=workspace)
    assert AnalysisDataService.doesExist(workspace)
    cls.workspaces_temporary.append(workspace)  # delete workspace at tear-down
    cls.corelli = {
        'tube_length': 0.900466,  # in meters
        'pixels_per_tube': 256,
        'workspace': workspace
    }
def setUpClass(cls):
    """Load the VULCAN diagnostics group and split it into individual workspaces."""
    LoadNexusProcessed(Filename="VULCAN_192227_diagnostics.nxs", OutputWorkspace="diagtest")
    UnGroupWorkspace("diagtest")
    # Names of the former group members, now individually available in the ADS
    cls.workspaces = ("diag_dspacing diag_fitted diag_fitparam strain "
                      "single_strain difference single_diff center_tof").split()
def runTest(self):
    """Reduce samples with a solvent correction: process the solvent first,
    save it to disk, then feed the saved files into the sample reduction."""
    # Run-number lists: ',' separates the two instrument configurations
    beams = '947,1088'
    containers = '973,1003'
    container_tr = '988'
    beam_tr = '1119'
    samples = '975,1005'
    sample_tr = '990'
    solvents = '1106,1091'
    solvent_tr = '1121'
    thick = 0.1  # sample thickness -- presumably cm, TODO confirm against SANSILLAutoProcess docs
    # this also tests that already loaded workspace can be passed instead of a file
    LoadNexusProcessed(Filename='sens-lamp.nxs', OutputWorkspace='sens-lamp')
    # first, process the solvent
    SANSILLAutoProcess(SampleRuns=solvents,
                       BeamRuns=beams,
                       DefaultMaskFile='edge_masks',
                       MaskFiles='mask_8m_4_6A,mask_1m_4_6A',
                       SensitivityMaps='sens-lamp',
                       SampleTransmissionRuns=solvent_tr,
                       ContainerTransmissionRuns=container_tr,
                       TransmissionBeamRuns=beam_tr,
                       SampleThickness=thick,
                       CalculateResolution='MildnerCarpenter',
                       OutputWorkspace='solvents',
                       BeamRadius='0.05',
                       TransmissionBeamRadius=0.05,
                       ClearCorrected2DWorkspace=False,  # keep the corrected 2D data: saved below
                       StitchReferenceIndex=0)
    # Save the two corrected solvent workspaces so they can be passed back in as files
    tmp_dir = gettempdir()
    solvent_dir = [os.path.join(tmp_dir, 'solvent_' + str(i) + '.nxs') for i in range(2)]
    SaveNexusProcessed('001106_Sample', solvent_dir[0])
    SaveNexusProcessed('001091_Sample', solvent_dir[1])
    # reduce samples
    SANSILLAutoProcess(SampleRuns=samples,
                       BeamRuns=beams,
                       ContainerRuns=containers,
                       DefaultMaskFile='edge_masks',
                       MaskFiles='mask_8m_4_6A,mask_1m_4_6A',
                       SensitivityMaps='sens-lamp',
                       SampleTransmissionRuns=sample_tr,
                       ContainerTransmissionRuns=container_tr,
                       TransmissionBeamRuns=beam_tr,
                       SolventFiles=",".join(solvent_dir),
                       SampleThickness=thick,
                       CalculateResolution='MildnerCarpenter',
                       OutputWorkspace='iq_mult_solvent',
                       BeamRadius='0.05',
                       TransmissionBeamRadius=0.05,
                       StitchReferenceIndex=0)
def _run_ifws(self):
    """Run a mixed EFWS+IFWS reduction and compare the result against a reference file."""
    # test EFWS+IFWS mixed
    IndirectILLReductionFWS(Run="165944:165953", SortXAxis=True, OutputWorkspace="ifws")
    LoadNexusProcessed(Filename="ILLIN16B_FWS.nxs", OutputWorkspace="ref")
    result = CompareWorkspaces(Workspace1='ifws_red', Workspace2='ref', **self.params)
    if result[0]:
        self.assertTrue(result[0])
    else:
        # Only build the failure message on a mismatch: result[1] (the message
        # table) is presumably not populated when the comparison succeeds, so
        # constructing the message eagerly could itself fail -- hence the branch.
        self.assertTrue(result[0], "Mismatch in IFWS: " + result[1].row(0)['Message'])
def runTest(self):
    """Sum banks 1 and 2 of the test data via an explicit detector-ID ROI."""
    input_ws = LoadNexusProcessed(self.TEST_FILENAME)

    first_pixel_id = 2001  # detector ID of the first pixel of bank 0
    pixels_per_bank = 60

    def bank_start(bank_position):
        """Detector ID of the first pixel in the bank at the given position."""
        return first_pixel_id + pixels_per_bank * bank_position

    # Banks 1 + 2: from the start of bank 1 up to (not including) the start of bank 3
    roi = f"{bank_start(1)}-{bank_start(3) - 1}"
    ReflectometryISISSumBanks(InputWorkspace=input_ws, ROIDetectorIDs=roi,
                              OutputWorkspace=self.OUT_WS_NAME)
def _create_flat_background_test_workspace(workspace_name):
    """Create a four-bin workspace whose spectra step from 2 (first two bins) to 4 (last two)."""
    LoadNexusProcessed(Filename="LOQ48127", OutputWorkspace=workspace_name)
    workspace = AnalysisDataService.retrieve(workspace_name)
    # Rebin to only have four values at 11, 31, 51, 70.5
    workspace = Rebin(workspace, "1,20,80")
    # In every spectrum the first two bins get 2 and the last two get 4
    for spectrum_index in range(workspace.getNumberHistograms()):
        counts = workspace.dataY(spectrum_index)
        counts[0] = counts[1] = 2.
        counts[2] = counts[3] = 4.
    return workspace
def _test_impl(self, tmp_dir: Path):
    """Run the NOMAD median detector test and load the resulting XML mask back in."""
    xml_mask_path = (tmp_dir / "NOMADTEST.xml").resolve()
    txt_mask_path = (tmp_dir / "NOMADTEST.txt").resolve()
    LoadNexusProcessed(Filename='NOM_144974_SingleBin.nxs', OutputWorkspace='NOM_144974')
    NOMADMedianDetectorTest(InputWorkspace='NOM_144974',
                            ConfigurationFile='NOMAD_mask_gen_config.yml',
                            SolidAngleNorm=False,
                            OutputMaskXML=str(xml_mask_path),
                            OutputMaskASCII=str(txt_mask_path))
    # Load the generated XML mask back so the caller can inspect it
    self.loaded_ws = LoadMask(Instrument='NOMAD',
                              InputFile=str(xml_mask_path),
                              RefWorkspace='NOM_144974',
                              StoreInADS=False)
def setUpClass(cls) -> None:
    r"""
    Load the tests cases for calibrate_bank, consisting of data for only one bank
    CORELLI_124023_bank10, tube 13 has shadows at pixel numbers quite different from the rest
    """
    config.appendDataSearchSubDir('CORELLI/calibration')
    # Locate the unit-test data directory among the configured search paths
    for directory in config.getDataSearchDirs():
        if 'UnitTest' in directory:
            data_dir = path.join(directory, 'CORELLI', 'calibration')
            break
    cls.workspaces_temporary = list()  # workspaces to be deleted at tear-down
    cls.cases = dict()  # maps case label -> workspace name in the ADS
    for bank_case in ('124016_bank10', '123454_bank58', '124023_bank10', '124023_banks_14_15'):
        workspace = 'CORELLI_' + bank_case
        LoadNexusProcessed(Filename=path.join(data_dir, workspace + '.nxs'),
                           OutputWorkspace=workspace)
        cls.cases[bank_case] = workspace
        cls.workspaces_temporary.append(workspace)
def _run_efws(self):
    """Run an EFWS reduction with background and calibration options and compare to a reference."""
    # test EFWS with sum/interpolate options with background and calibration
    IndirectILLReductionFWS(Run="143720:143728:2",
                            BackgroundRun="143721,143723,143725",
                            CalibrationRun="143727,143729",
                            BackgroundOption="Interpolate",
                            CalibrationOption="Sum",
                            SortXAxis=True,
                            OutputWorkspace="efws")
    LoadNexusProcessed(Filename="ILLIN16B_EFWS.nxs", OutputWorkspace="ref")
    result = CompareWorkspaces(Workspace1='efws_0.0_red', Workspace2='ref', **self.params)
    if result[0]:
        self.assertTrue(result[0])
    else:
        # Only build the failure message on a mismatch: result[1] (the message
        # table) is presumably not populated when the comparison succeeds, so
        # constructing the message eagerly could itself fail -- hence the branch.
        self.assertTrue(result[0], "Mismatch in EFWS: " + result[1].row(0)['Message'])
def load_banks(filename: str, bank_selection: str, output_workspace: str) -> Workspace2D:
    r"""
    Load events only for the selected banks, and don't load metadata.

    If the file is not an events file, but a Nexus processed file, the bank_selection is ignored.

    :param filename: Filename to an Event nexus file or a processed nexus file
    :param bank_selection: selection string, such as '10,12-15,17-21'
    :param output_workspace: name of the output workspace containing counts per pixel
    :raises AssertionError: when ``filename`` does not exist
    :return: workspace containing counts per pixel. Events in each pixel are integrated
        into neutron counts.
    """
    # BUGFIX: the f-string previously had no placeholder, so the error always
    # read "File (unknown) does not exist"; interpolate the actual path.
    assert path.exists(filename), f'File {filename} does not exist'
    bank_names = ','.join(['bank' + b for b in bank_numbers(bank_selection)])
    # Try the event loader first; fall back to a processed-nexus load, in which
    # case the bank selection cannot be honoured (see docstring).
    try:
        LoadEventNexus(Filename=filename, OutputWorkspace=output_workspace,
                       BankName=bank_names, LoadMonitors=False, LoadLogs=False)
    except (RuntimeError, ValueError):
        LoadNexusProcessed(Filename=filename, OutputWorkspace=output_workspace)
    # Collapse the TOF axis so each pixel holds its integrated neutron count
    Integration(InputWorkspace=output_workspace, OutputWorkspace=output_workspace)
    return mtd[output_workspace]
def test_exec(self):
    """Run NOMADMedianDetectorTest and spot-check the generated XML and ASCII masks."""
    import os  # local import: only needed to close the temp-file descriptors

    # mkstemp returns an open OS-level file descriptor along with the path.
    # BUGFIX: close the descriptors instead of discarding them (they leaked before).
    fd_xml, file_xml_mask = tempfile.mkstemp(suffix='.xml')
    os.close(fd_xml)
    fd_txt, file_txt_mask = tempfile.mkstemp(suffix='.txt')
    os.close(fd_txt)

    LoadNexusProcessed(Filename='NOM_144974_SingleBin.nxs', OutputWorkspace='NOM_144974')
    NOMADMedianDetectorTest(InputWorkspace='NOM_144974',
                            ConfigurationFile='NOMAD_mask_gen_config.yml',
                            SolidAngleNorm=False,
                            OutputMaskXML=file_xml_mask,
                            OutputMaskASCII=file_txt_mask)
    # verify the XML mask
    with open(file_xml_mask) as f:
        contents = f.read()
    for segment in ['0-3122', '48847-48900', '65020-65029', '98295-101375']:  # test a few
        assert segment in contents
    # verify the single-column ASCII mask
    with open(file_txt_mask) as f:
        contents = f.read()
    for detector_id in [0, 3122, 48847, 48900, 65020, 65029, 98295]:
        assert f' {detector_id}\n' in contents
def load_bank_table(bank_id: int, database_path: str, date: str, table_type: str = 'calibration') -> TableWorkspace:
    """
    Function that loads the latest bank calibrated TableWorkspace from a single HDF5 file
    using corelli format:
    database_path/bank0ID/type_corelli_bank0ID_YYYYMMDD.nxs

    :param bank_id: bank number that is calibrated
    :param database_path: location of the corelli database (absolute or relative)
        Example: database/corelli/ for
        database/corelli/bank001/
        database/corelli/bank002/
    :param date: current day in YYYYMMDD format
    :param table_type: 'calibration', 'mask' or 'fit'
    :return: TableWorkspace with corresponding data
    """
    # Validate the table type and date format before touching the filesystem;
    # these helpers raise on invalid input -- exact exception types are defined
    # by TableType.assert_valid_type / verify_date_format (TODO confirm).
    TableType.assert_valid_type(table_type)
    verify_date_format('load_bank_table', date)
    filename: str = filename_bank_table(bank_id, database_path, date, table_type)
    logger.notice(f'Loading bank{bank_id} {table_type} file from database')
    outputWS = LoadNexusProcessed(filename)
    return outputWS
def setUpClass(cls):
    r"""
    Load the tests cases for calibrate_bank, consisting of data for only one bank
    CORELLI_123455_bank20, control bank, it has no problems
    CORELLI_123454_bank58, beam center intensity spills over adjacent tubes, tube15 and tube16
    CORELLI_124018_bank45, tube11 is not working at all
    CORELLI_123555_bank20, insufficient intensity for all tubes in the bank
    CORELLI_124023_bank10, tube 13 has shadows at pixel numbers quite different from the rest
    CORELLI_124023_bank14, wire shadows very faint, only slightly larger than fluctuations of the background
    CORELLI_124023_bank15, one spurious shadow in tube14
    Load the test case for calibrate_banks, consisting of data for two banks
    CORELLI_124023_banks_14_15
    """
    config.appendDataSearchSubDir('CORELLI/calibration')
    # Locate the unit-test data directory among the configured search paths
    for directory in config.getDataSearchDirs():
        if 'UnitTest' in directory:
            data_dir = path.join(directory, 'CORELLI', 'calibration')
            break
    cls.cases = dict()  # maps case label -> workspace name in the ADS
    for bank_case in ('123454_bank58', '124018_bank45', '123555_bank20', '123455_bank20',
                      '124023_bank10', '124023_bank14', '124023_bank15', '124023_banks_14_15'):
        workspace = 'CORELLI_' + bank_case
        LoadNexusProcessed(Filename=path.join(data_dir, workspace + '.nxs'),
                           OutputWorkspace=workspace)
        cls.cases[bank_case] = workspace

    def assert_missing_tube(cls_other, calibration_table, tube_number):
        r"""Check detector ID's from a failing tube are not in the calibration table"""
        table = mtd[str(calibration_table)]
        first = table.cell('Detector ID', 0)  # first detector ID
        # Would-be first and last detectors ID's for the failing tube
        # (256 pixels per tube, so tube N spans 256 consecutive IDs)
        begin, end = first + (tube_number - 1) * 256, first + tube_number * 256 - 1
        detectors_ids = table.column(0)
        assert begin not in detectors_ids
        assert end not in detectors_ids

    # sneak in a class method, make sure it's loaded before any tests is executed
    cls.assert_missing_tube = assert_missing_tube
def PyExec(self):
    """Load (from cache when available), process, and accumulate all input files
    into a single output workspace."""
    filenames = self._getLinearizedFilenames('Filename')
    self.filterBadPulses = self.getProperty('FilterBadPulses').value
    self.chunkSize = self.getProperty('MaxChunkSize').value
    self.absorption = self.getProperty('AbsorptionWorkspace').value
    self.charac = self.getProperty('Characterizations').value
    finalname = self.getProperty('OutputWorkspace').valueAsStr

    self.prog_per_file = 1. / float(len(filenames))  # for better progress reporting

    # these are also passed into the child-algorithms
    self.kwargs = self.__getAlignAndFocusArgs()

    # outer loop creates chunks to load
    for (i, filename) in enumerate(filenames):
        # default name is based off of filename
        wkspname = os.path.split(filename)[-1].split('.')[0]
        self.__determineCharacterizations(filename, wkspname)  # updates instance variable
        cachefile = self.__getCacheName(wkspname)
        wkspname += '_f%d' % i  # add file number to be unique

        if cachefile is not None and os.path.exists(cachefile):
            # Cache hit: reload the already-processed data instead of reprocessing
            LoadNexusProcessed(Filename=cachefile, OutputWorkspace=wkspname)
            # TODO LoadNexusProcessed has a bug. When it finds the
            # instrument name without xml it reads in from an IDF
            # in the instrument directory.
            # Re-apply any non-default instrument-geometry overrides lost by the cache load
            editinstrargs = {}
            for name in PROPS_FOR_INSTR:
                prop = self.getProperty(name)
                if not prop.isDefault:
                    editinstrargs[name] = prop.value
            if editinstrargs:
                EditInstrumentGeometry(Workspace=wkspname, **editinstrargs)
        else:
            # Cache miss: do the full processing and, if caching is enabled, save the result
            self.__processFile(filename, wkspname, self.prog_per_file * float(i))
            if cachefile is not None:
                SaveNexusProcessed(InputWorkspace=wkspname, Filename=cachefile)

        # accumulate runs
        if i == 0:
            if wkspname != finalname:
                RenameWorkspace(InputWorkspace=wkspname, OutputWorkspace=finalname)
        else:
            Plus(LHSWorkspace=finalname, RHSWorkspace=wkspname, OutputWorkspace=finalname,
                 ClearRHSWorkspace=self.kwargs['PreserveEvents'])
            DeleteWorkspace(Workspace=wkspname)
            if self.kwargs['PreserveEvents']:
                CompressEvents(InputWorkspace=finalname, OutputWorkspace=finalname)

    # with more than one chunk or file the integrated proton charge is
    # generically wrong
    mtd[finalname].run().integrateProtonCharge()

    # set the output workspace
    self.setProperty('OutputWorkspace', mtd[finalname])
def test_save_banks_binning(self):
    """save_banks with a Binning argument must rebin the saved data."""
    save_banks(self.wksp, self.out_nxs, 'wksp', '.', Binning='0,100,10000')
    reloaded = LoadNexusProcessed(self.out_nxs)
    # The rebinned output no longer matches the source binning...
    self.assertNotEqual(reloaded.blocksize(), self.wksp.blocksize())
    # ...and 0 to 10000 in steps of 100 gives exactly 100 bins
    self.assertEqual(reloaded.blocksize(), 100)
def PyExec(self):
    """Reduce each requested SNAP run: align/focus, optional background subtraction,
    optional normalization, then convert units, save, and export outputs."""
    in_Runs = self.getProperty("RunNumbers").value
    progress = Progress(self, 0., .25, 3)
    finalUnits = self.getPropertyValue("FinalUnits")
    self.chunkSize = self.getProperty('MaxChunkSize').value

    # default arguments for AlignAndFocusPowder
    self.alignAndFocusArgs = {'Tmin': 0,
                              'TMax': 50000,
                              'RemovePromptPulseWidth': 1600,
                              'PreserveEvents': False,
                              'Dspacing': True,  # binning parameters in d-space
                              'Params': self.getProperty("Binning").value,
                              }

    # workspace for loading metadata only to be used in LoadDiffCal and
    # CreateGroupingWorkspace
    metaWS = None

    # either type of file-based calibration is stored in the same variable
    calib = self.getProperty("Calibration").value
    detcalFile = None
    if calib == "Calibration File":
        metaWS = self._loadMetaWS(in_Runs[0])
        LoadDiffCal(Filename=self.getPropertyValue("CalibrationFilename"),
                    WorkspaceName='SNAP',
                    InputWorkspace=metaWS,
                    MakeGroupingWorkspace=False, MakeMaskWorkspace=False)
        self.alignAndFocusArgs['CalibrationWorkspace'] = 'SNAP_cal'
    elif calib == 'DetCal File':
        detcalFile = ','.join(self.getProperty('DetCalFilename').value)
    progress.report('loaded calibration')

    norm = self.getProperty("Normalization").value

    # Resolve where the normalization comes from: a processed file, an existing
    # workspace, or nothing (may be generated per-run later)
    if norm == "From Processed Nexus":
        norm_File = self.getProperty("NormalizationFilename").value
        normalizationWS = 'normWS'
        LoadNexusProcessed(Filename=norm_File, OutputWorkspace=normalizationWS)
        progress.report('loaded normalization')
    elif norm == "From Workspace":
        normalizationWS = str(self.getProperty("NormalizationWorkspace").value)
        progress.report('')
    else:
        normalizationWS = None
        progress.report('')

    self.alignAndFocusArgs['GroupingWorkspace'] = self._generateGrouping(in_Runs[0], metaWS, progress)
    self.alignAndFocusArgs['MaskWorkspace'] = self._getMaskWSname(in_Runs[0], metaWS)  # can be empty string

    if metaWS is not None:
        DeleteWorkspace(Workspace=metaWS)

    Process_Mode = self.getProperty("ProcessingMode").value

    prefix = self.getProperty("OptionalPrefix").value

    Tag = 'SNAP'
    progStart = .25
    progDelta = (1.-progStart)/len(in_Runs)

    # --------------------------- PROCESS BACKGROUND ----------------------
    if not self.getProperty('Background').isDefault:
        progDelta = (1. - progStart) / (len(in_Runs) + 1)  # redefine to account for background
        background = 'SNAP_{}'.format(self.getProperty('Background').value)
        self.log().notice("processing run background {}".format(background))
        background, unfocussedBkgd = self._alignAndFocus(background,
                                                         background+'_bkgd_red',
                                                         detCalFilename=detcalFile,
                                                         withUnfocussed=(Process_Mode == 'Set-Up'),
                                                         progStart=progStart,
                                                         progDelta=progDelta)
    else:
        background = None
        unfocussedBkgd = ''

    # --------------------------- REDUCE DATA -----------------------------
    for i, runnumber in enumerate(in_Runs):
        self.log().notice("processing run %s" % runnumber)

        # put together output names
        new_Tag = Tag
        if len(prefix) > 0:
            new_Tag = prefix + '_' + new_Tag
        basename = '%s_%s_%s' % (new_Tag, runnumber, self.alignAndFocusArgs['GroupingWorkspace'])
        self.log().warning('{}:{}:{}'.format(i, new_Tag, basename))

        redWS, unfocussedWksp = self._alignAndFocus('SNAP_{}'.format(runnumber),
                                                    basename + '_red',
                                                    detCalFilename=detcalFile,
                                                    withUnfocussed=(Process_Mode == 'Set-Up'),
                                                    progStart=progStart,
                                                    progDelta=progDelta*.5)
        progStart += .5 * progDelta

        # subtract the background if it was supplied
        if background:
            self.log().information('subtracting {} from {}'.format(background, redWS))
            Minus(LHSWorkspace=redWS, RHSWorkspace=background, OutputWorkspace=redWS)
            # intentionally don't subtract the unfocussed workspace since it hasn't been normalized by counting time

        # the rest takes up .25 percent of the run processing
        progress = Progress(self, progStart, progStart+.25*progDelta, 2)

        # AlignAndFocusPowder leaves the data in time-of-flight
        ConvertUnits(InputWorkspace=redWS, OutputWorkspace=redWS, Target='dSpacing', EMode='Elastic')

        # Edit instrument geometry to make final workspace smaller on disk
        det_table = PreprocessDetectorsToMD(Inputworkspace=redWS,
                                            OutputWorkspace='__SNAP_det_table')
        polar = np.degrees(det_table.column('TwoTheta'))
        azi = np.degrees(det_table.column('Azimuthal'))
        EditInstrumentGeometry(Workspace=redWS, L2=det_table.column('L2'),
                               Polar=polar, Azimuthal=azi)
        mtd.remove('__SNAP_det_table')
        progress.report('simplify geometry')

        # AlignAndFocus doesn't necessarily rebin the data correctly
        if Process_Mode == "Set-Up":
            Rebin(InputWorkspace=unfocussedWksp, Params=self.alignAndFocusArgs['Params'],
                  Outputworkspace=unfocussedWksp)
            if background:
                Rebin(InputWorkspace=unfocussedBkgd, Params=self.alignAndFocusArgs['Params'],
                      Outputworkspace=unfocussedBkgd)

        # normalize the data as requested
        normalizationWS = self._generateNormalization(redWS, norm, normalizationWS)
        normalizedWS = None
        if normalizationWS is not None:
            normalizedWS = basename + '_nor'
            Divide(LHSWorkspace=redWS, RHSWorkspace=normalizationWS,
                   OutputWorkspace=normalizedWS)
            ReplaceSpecialValues(Inputworkspace=normalizedWS,
                                 OutputWorkspace=normalizedWS,
                                 NaNValue='0', NaNError='0',
                                 InfinityValue='0', InfinityError='0')
            progress.report('normalized')
        else:
            progress.report()

        # rename everything as appropriate and determine output workspace name
        if normalizedWS is None:
            outputWksp = redWS
        else:
            outputWksp = normalizedWS

            # only in production with an in-data normalizer are the intermediates disposable
            if norm == "Extracted from Data" and Process_Mode == "Production":
                DeleteWorkspace(Workspace=redWS)
                DeleteWorkspace(Workspace=normalizationWS)

        # Save requested formats - function checks that saving is requested
        self._save(runnumber, basename, outputWksp)

        # set workspace as an output so it gets history
        ConvertUnits(InputWorkspace=str(outputWksp), OutputWorkspace=str(outputWksp),
                     Target=finalUnits, EMode='Elastic')
        self._exportWorkspace('OutputWorkspace_' + str(outputWksp), outputWksp)

        # declare some things as extra outputs in set-up
        if Process_Mode != "Production":
            propprefix = 'OutputWorkspace_{:d}_'.format(i)
            propNames = [propprefix + it for it in ['d', 'norm', 'normalizer']]
            wkspNames = ['%s_%s_d' % (new_Tag, runnumber),
                         basename + '_red',
                         '%s_%s_normalizer' % (new_Tag, runnumber)]
            for (propName, wkspName) in zip(propNames, wkspNames):
                self._exportWorkspace(propName, wkspName)

    # Export the (shared) processed background after all runs are reduced
    if background:
        ConvertUnits(InputWorkspace=str(background), OutputWorkspace=str(background),
                     Target=finalUnits, EMode='Elastic')
        prefix = 'OutputWorkspace_{}'.format(len(in_Runs))
        propNames = [prefix + it for it in ['', '_d']]
        wkspNames = [background, unfocussedBkgd]
        for (propName, wkspName) in zip(propNames, wkspNames):
            self._exportWorkspace(propName, wkspName)
def test_new_corelli_calibration_and_load_calibration(self):
    r"""Creating a database is time consuming, thus we test both new_corelli_calibration
    and load_calibration"""

    # populate a calibration database with a few cases. There should be at least one bank with two calibrations
    database = tempfile.TemporaryDirectory()
    cases = [('124016_bank10', '10'), ('124023_bank10', '10'), ('124023_banks_14_15', '14-15')]
    for bank_case, bank_selection in cases:
        # Produce workspace groups 'calibrations', 'masks', 'fits'
        calibrate_banks(self.cases[bank_case], bank_selection)
        masks = 'masks' if AnalysisDataService.doesExist('masks') else None
        save_calibration_set(self.cases[bank_case], database.name, 'calibrations', masks, 'fits')
        DeleteWorkspaces(['calibrations', 'fits'])
        # 'masks' is only produced for some cases, so delete it conditionally
        if AnalysisDataService.doesExist('masks'):
            DeleteWorkspaces(['masks'])

    # invoque creation of new corelli calibration without a date
    calibration_file, mask_file, manifest_file = new_corelli_calibration(database.name)
    for file_path in (calibration_file, mask_file, manifest_file):
        assert path.exists(file_path)
    assert open(manifest_file).read() == 'bankID, timestamp\n10, 20200109\n14, 20200109\n15, 20200109\n'

    # load latest calibration and mask (day-stamp of '124023_bank10' is 20200109)
    calibration, mask = load_calibration_set(self.cases['124023_bank10'], database.name,
                                             mask_format='TableWorkspace')
    calibration_expected = LoadNexusProcessed(Filename=calibration_file)
    mask_expected = LoadNexusProcessed(Filename=mask_file)
    assert_allclose(calibration.column(1), calibration_expected.column(1), atol=1e-4)
    assert mask.column(0) == mask_expected.column(0)

    # invoque a new corelli calibration with a date falling in between the bank (bank10) in
    # in our small dataset having two calibrations
    calibration_file, mask_file, manifest_file = new_corelli_calibration(database.name, date='20200108')
    for file_path in (calibration_file, mask_file, manifest_file):
        assert path.exists(file_path)
    assert open(manifest_file).read() == 'bankID, timestamp\n10, 20200106\n'

    # load oldest calibration and mask(day-stamp of '124023_bank10' is 20200106)
    calibration, mask = load_calibration_set(self.cases['124016_bank10'], database.name,
                                             mask_format='TableWorkspace')
    calibration_expected = LoadNexusProcessed(Filename=calibration_file)
    mask_expected = LoadNexusProcessed(Filename=mask_file)
    assert_allclose(calibration.column(1), calibration_expected.column(1), atol=1e-4)
    assert mask.column(0) == mask_expected.column(0)

    database.cleanup()
def runTest(self):
    """Sum all banks of the test file using the algorithm's default ROI."""
    input_ws = LoadNexusProcessed(self.TEST_FILENAME)
    ReflectometryISISSumBanks(InputWorkspace=input_ws,
                              OutputWorkspace=self.OUT_WS_NAME)
def PyExec(self):
    """Reduce the requested SNAP runs: load, align-and-focus, convert to d-spacing,
    normalize as requested, save, and expose the results as output workspaces."""
    in_Runs = self.getProperty("RunNumbers").value
    maskWSname = self._getMaskWSname()
    # first quarter of the progress bar covers setup (calibration, normalization, grouping)
    progress = Progress(self, 0., .25, 3)
    # default arguments for AlignAndFocusPowder
    alignAndFocusArgs = {
        'TMax': 50000,
        'RemovePromptPulseWidth': 1600,
        'PreserveEvents': False,
        'Dspacing': True,  # binning parameters in d-space
        'Params': self.getProperty("Binning").value
    }
    # workspace for loading metadata only to be used in LoadDiffCal and
    # CreateGroupingWorkspace
    metaWS = None
    # either type of file-based calibration is stored in the same variable
    calib = self.getProperty("Calibration").value
    detcalFile = None
    if calib == "Calibration File":
        metaWS = self._loadMetaWS(in_Runs[0])
        LoadDiffCal(Filename=self.getPropertyValue("CalibrationFilename"),
                    WorkspaceName='SNAP',  # produces 'SNAP_cal'
                    InputWorkspace=metaWS,
                    MakeGroupingWorkspace=False, MakeMaskWorkspace=False)
        alignAndFocusArgs['CalibrationWorkspace'] = 'SNAP_cal'
    elif calib == 'DetCal File':
        # DetCal files are applied per-run via LoadIsawDetCal inside the loop below
        detcalFile = ','.join(self.getProperty('DetCalFilename').value)
    progress.report('loaded calibration')

    norm = self.getProperty("Normalization").value
    if norm == "From Processed Nexus":
        norm_File = self.getProperty("NormalizationFilename").value
        normalizationWS = 'normWS'
        LoadNexusProcessed(Filename=norm_File, OutputWorkspace=normalizationWS)
        progress.report('loaded normalization')
    elif norm == "From Workspace":
        normalizationWS = str(self.getProperty("NormalizationWorkspace").value)
        progress.report('')
    else:
        # covers 'None' and 'Extracted from Data'; the latter is created per-run
        # by _generateNormalization below
        normalizationWS = None
        progress.report('')

    group = self._generateGrouping(in_Runs[0], metaWS, progress)
    if metaWS is not None:
        # metadata-only workspace no longer needed
        DeleteWorkspace(Workspace=metaWS)

    Process_Mode = self.getProperty("ProcessingMode").value
    prefix = self.getProperty("OptionalPrefix").value

    # --------------------------- REDUCE DATA -----------------------------
    Tag = 'SNAP'
    if self.getProperty("LiveData").value:
        Tag = 'Live'
    # remaining three quarters of the progress bar are shared evenly among the runs
    progStart = .25
    progDelta = (1. - progStart) / len(in_Runs)
    for i, runnumber in enumerate(in_Runs):
        self.log().notice("processing run %s" % runnumber)
        self.log().information(str(self.get_IPTS_Local(runnumber)))

        # put together output names
        new_Tag = Tag
        if len(prefix) > 0:
            new_Tag += '_' + prefix
        basename = '%s_%s_%s' % (new_Tag, runnumber, group)

        if self.getProperty("LiveData").value:
            raise RuntimeError('Live data is not currently supported')
        else:
            Load(Filename='SNAP' + str(runnumber),
                 OutputWorkspace=basename + '_red',
                 startProgress=progStart,
                 endProgress=progStart + .25 * progDelta)
            progStart += .25 * progDelta
        redWS = basename + '_red'

        # overwrite geometry with detcal files
        if calib == 'DetCal File':
            LoadIsawDetCal(InputWorkspace=redWS, Filename=detcalFile)

        # create unfocussed data if in set-up mode
        if Process_Mode == "Set-Up":
            unfocussedWksp = '{}_{}_d'.format(new_Tag, runnumber)
        else:
            unfocussedWksp = ''

        AlignAndFocusPowder(InputWorkspace=redWS, OutputWorkspace=redWS,
                            MaskWorkspace=maskWSname,  # can be empty string
                            GroupingWorkspace=group,
                            UnfocussedWorkspace=unfocussedWksp,  # can be empty string
                            startProgress=progStart,
                            endProgress=progStart + .5 * progDelta,
                            **alignAndFocusArgs)
        progStart += .5 * progDelta

        # the rest takes up .25 percent of the run processing
        progress = Progress(self, progStart, progStart + .25 * progDelta, 2)

        # AlignAndFocusPowder leaves the data in time-of-flight
        ConvertUnits(InputWorkspace=redWS, OutputWorkspace=redWS,
                     Target='dSpacing', EMode='Elastic')

        # Edit instrument geometry to make final workspace smaller on disk
        # NOTE(review): 'Inputworkspace' casing differs from the usual
        # 'InputWorkspace' — presumably accepted by the algorithm wrapper; confirm.
        det_table = PreprocessDetectorsToMD(Inputworkspace=redWS,
                                            OutputWorkspace='__SNAP_det_table')
        polar = np.degrees(det_table.column('TwoTheta'))
        azi = np.degrees(det_table.column('Azimuthal'))
        EditInstrumentGeometry(Workspace=redWS,
                               L2=det_table.column('L2'),
                               Polar=polar, Azimuthal=azi)
        mtd.remove('__SNAP_det_table')
        progress.report('simplify geometry')

        # AlignAndFocus doesn't necessarily rebin the data correctly
        if Process_Mode == "Set-Up":
            # NOTE(review): 'Outputworkspace' casing — see note above on kwarg casing.
            Rebin(InputWorkspace=unfocussedWksp,
                  Params=alignAndFocusArgs['Params'],
                  Outputworkspace=unfocussedWksp)
        NormaliseByCurrent(InputWorkspace=redWS, OutputWorkspace=redWS)

        # normalize the data as requested
        normalizationWS = self._generateNormalization(redWS, norm, normalizationWS)
        normalizedWS = None
        if normalizationWS is not None:
            normalizedWS = basename + '_nor'
            Divide(LHSWorkspace=redWS, RHSWorkspace=normalizationWS,
                   OutputWorkspace=normalizedWS)
            ReplaceSpecialValues(Inputworkspace=normalizedWS,
                                 OutputWorkspace=normalizedWS,
                                 NaNValue='0', NaNError='0',
                                 InfinityValue='0', InfinityError='0')
            progress.report('normalized')
        else:
            progress.report()

        # rename everything as appropriate and determine output workspace name
        if normalizedWS is None:
            outputWksp = redWS
        else:
            outputWksp = normalizedWS
            # in production the intermediate reduced data and the extracted
            # normalizer are not kept
            if norm == "Extracted from Data" and Process_Mode == "Production":
                DeleteWorkspace(Workspace=redWS)
                DeleteWorkspace(Workspace=normalizationWS)

        # Save requested formats
        saveDir = self.getPropertyValue("OutputDirectory").strip()
        if len(saveDir) <= 0:
            self.log().notice('Using default save location')
            saveDir = os.path.join(self.get_IPTS_Local(runnumber), 'shared', 'data')
        self._save(saveDir, basename, outputWksp)

        # set workspace as an output so it gets history
        propertyName = 'OutputWorkspace_' + str(outputWksp)
        self.declareProperty(WorkspaceProperty(propertyName, outputWksp, Direction.Output))
        self.setProperty(propertyName, outputWksp)

        # declare some things as extra outputs in set-up
        if Process_Mode != "Production":
            # NOTE(review): 'OuputWorkspace' (missing 't') is the property-name
            # prefix actually emitted; left unchanged since callers may rely on it.
            prefix = 'OuputWorkspace_{:d}_'.format(i)
            propNames = [prefix + it for it in ['d', 'norm', 'normalizer']]
            wkspNames = ['%s_%s_d' % (new_Tag, runnumber),
                         basename + '_red',
                         '%s_%s_normalizer' % (new_Tag, runnumber)]
            for (propName, wkspName) in zip(propNames, wkspNames):
                # only declare outputs for workspaces that actually exist
                if mtd.doesExist(wkspName):
                    self.declareProperty(WorkspaceProperty(propName, wkspName, Direction.Output))
                    self.setProperty(propName, wkspName)
def PyExec(self):
    """Reduce the requested SNAP runs: load each run into 'WS', clean and focus it,
    optionally normalize, then rename/save the per-run results."""
    # Retrieve all relevant properties
    in_Runs = self.getProperty("RunNumbers").value
    maskWSname = self._getMaskWSname()

    # either type of file-based calibration is stored in the same variable
    calib = self.getProperty("Calibration").value
    if calib == "Calibration File":
        cal_File = self.getProperty("CalibrationFilename").value
    elif calib == 'DetCal File':
        cal_File = self.getProperty('DetCalFilename').value
        cal_File = ','.join(cal_File)
    else:
        cal_File = None

    params = self.getProperty("Binning").value
    norm = self.getProperty("Normalization").value
    if norm == "From Processed Nexus":
        norm_File = self.getProperty("NormalizationFilename").value
        LoadNexusProcessed(Filename=norm_File, OutputWorkspace='normWS')
        normWS = 'normWS'
    elif norm == "From Workspace":
        normWS = str(self.getProperty("NormalizationWorkspace").value)
    else:
        # covers 'None' and 'Extracted from Data' (the latter is created per-run)
        normWS = None

    # map the user-facing grouping choice to the name CreateGroupingWorkspace expects
    group_to_real = {'Banks': 'Group', 'Modules': 'bank', '2_4 Grouping': '2_4Grouping'}
    group = self.getProperty('GroupDetectorsBy').value
    real_name = group_to_real.get(group, group)
    if not mtd.doesExist(group):
        if group == '2_4 Grouping':
            # workspace names cannot contain spaces
            group = '2_4_Grouping'
        CreateGroupingWorkspace(InstrumentName='SNAP',
                                GroupDetectorsBy=real_name,
                                OutputWorkspace=group)

    Process_Mode = self.getProperty("ProcessingMode").value
    prefix = self.getProperty("OptionalPrefix").value

    # --------------------------- REDUCE DATA -----------------------------
    Tag = 'SNAP'
    for r in in_Runs:
        self.log().notice("processing run %s" % r)
        self.log().information(str(self.get_IPTS_Local(r)))
        if self.getProperty("LiveData").value:
            Tag = 'Live'
            LoadPreNexusLive(Instrument='SNAP', OutputWorkspace='WS')
        else:
            Load(Filename='SNAP' + str(r), OutputWorkspace='WS')
            # current normalisation only applies to file-based (non-live) data
            NormaliseByCurrent(InputWorkspace='WS', OutputWorkspace='WS')

        CompressEvents(InputWorkspace='WS', OutputWorkspace='WS')
        CropWorkspace(InputWorkspace='WS', OutputWorkspace='WS', XMax=50000)
        RemovePromptPulse(InputWorkspace='WS', OutputWorkspace='WS',
                          Width='1600', Frequency='60.4')
        if maskWSname is not None:
            MaskDetectors(Workspace='WS', MaskedWorkspace=maskWSname)

        # produces 'WS_red' (focused) and 'WS_d' (unfocused d-space) workspaces
        self._alignAndFocus(params, calib, cal_File, group)

        normWS = self._generateNormalization('WS_red', norm, normWS)
        WS_nor = None
        if normWS is not None:
            WS_nor = 'WS_nor'
            Divide(LHSWorkspace='WS_red', RHSWorkspace=normWS,
                   OutputWorkspace='WS_nor')
            # NOTE(review): 'Inputworkspace' casing differs from the usual
            # 'InputWorkspace' — presumably accepted by the algorithm wrapper; confirm.
            ReplaceSpecialValues(Inputworkspace='WS_nor',
                                 OutputWorkspace='WS_nor',
                                 NaNValue='0', NaNError='0',
                                 InfinityValue='0', InfinityError='0')

        new_Tag = Tag
        if len(prefix) > 0:
            new_Tag += '_' + prefix

        # Edit instrument geometry to make final workspace smaller on disk
        det_table = PreprocessDetectorsToMD(Inputworkspace='WS_red',
                                            OutputWorkspace='__SNAP_det_table')
        polar = np.degrees(det_table.column('TwoTheta'))
        azi = np.degrees(det_table.column('Azimuthal'))
        EditInstrumentGeometry(Workspace='WS_red',
                               L2=det_table.column('L2'),
                               Polar=polar, Azimuthal=azi)
        if WS_nor is not None:
            EditInstrumentGeometry(Workspace='WS_nor',
                                   L2=det_table.column('L2'),
                                   Polar=polar, Azimuthal=azi)
        mtd.remove('__SNAP_det_table')

        # Save requested formats
        basename = '%s_%s_%s' % (new_Tag, r, group)
        self._save(r, basename, norm)

        # temporary workspace no longer needed
        DeleteWorkspace(Workspace='WS')

        # rename everything as appropriate and determine output workspace name
        RenameWorkspace(Inputworkspace='WS_d',
                        OutputWorkspace='%s_%s_d' % (new_Tag, r))
        RenameWorkspace(Inputworkspace='WS_red',
                        OutputWorkspace=basename + '_red')
        if norm == 'None':
            outputWksp = basename + '_red'
        else:
            outputWksp = basename + '_nor'
            RenameWorkspace(Inputworkspace='WS_nor',
                            OutputWorkspace=basename + '_nor')

        if norm == "Extracted from Data":
            RenameWorkspace(Inputworkspace='peak_clip_WS',
                            OutputWorkspace='%s_%s_normalizer' % (new_Tag, r))

        # delete some things in production
        if Process_Mode == "Production":
            DeleteWorkspace(Workspace='%s_%s_d' % (new_Tag, r))  # was 'WS_d'
            if norm != "None":
                DeleteWorkspace(Workspace=basename + '_red')  # was 'WS_red'
            if norm == "Extracted from Data":
                DeleteWorkspace(Workspace='%s_%s_normalizer' % (new_Tag, r))  # was 'peak_clip_WS'

        # set the surviving workspace as an output so it gets history
        propertyName = 'OutputWorkspace_' + str(outputWksp)
        self.declareProperty(WorkspaceProperty(propertyName, outputWksp, Direction.Output))
        self.setProperty(propertyName, outputWksp)
def PyExec(self):
    """Align-and-focus every input file (optionally via a per-file disk cache) and
    accumulate the results into the OutputWorkspace (and, if requested, the
    UnfocussedWorkspace)."""
    filenames = self._getLinearizedFilenames('Filename')
    self.filterBadPulses = self.getProperty('FilterBadPulses').value
    self.chunkSize = self.getProperty('MaxChunkSize').value
    self.absorption = self.getProperty('AbsorptionWorkspace').value
    self.charac = self.getProperty('Characterizations').value
    finalname = self.getPropertyValue('OutputWorkspace')
    useCaching = len(self.getProperty('CacheDir').value) > 0

    # accumulate the unfocused workspace if it was requested
    # empty string means it is not used
    unfocusname = self.getPropertyValue('UnfocussedWorkspace')
    unfocusname_file = ''
    if len(unfocusname) > 0:
        # per-file partial workspace, summed into unfocusname below
        unfocusname_file = '__{}_partial'.format(unfocusname)

    if useCaching:
        # unfocus check only matters if caching is requested
        if unfocusname != '':
            self.log().warning(
                'CacheDir is specified with "UnfocussedWorkspace" - reading cache files disabled'
            )
    else:
        self.log().warning(
            'CacheDir is not specified - functionality disabled')

    self.prog_per_file = 1. / float(len(filenames))  # for better progress reporting

    # these are also passed into the child-algorithms
    self.kwargs = self.__getAlignAndFocusArgs()

    # outer loop creates chunks to load
    for (i, filename) in enumerate(filenames):
        # default name is based off of filename
        wkspname = os.path.split(filename)[-1].split('.')[0]

        if useCaching:
            self.__determineCharacterizations(filename, wkspname, True)  # updates instance variable
            cachefile = self.__getCacheName(wkspname)
        else:
            cachefile = None

        wkspname += '_f%d' % i  # add file number to be unique

        # if the unfocussed data is requested, don't read it from disk
        # because of the extra complication of the unfocussed workspace
        if useCaching and os.path.exists(cachefile) and unfocusname == '':
            LoadNexusProcessed(Filename=cachefile, OutputWorkspace=wkspname)
            # TODO LoadNexusProcessed has a bug. When it finds the
            # instrument name without xml it reads in from an IDF
            # in the instrument directory.
            # As a workaround, re-apply any non-default instrument-geometry
            # properties to the cached workspace.
            editinstrargs = {}
            for name in PROPS_FOR_INSTR:
                prop = self.getProperty(name)
                if not prop.isDefault:
                    editinstrargs[name] = prop.value
            if editinstrargs:
                EditInstrumentGeometry(Workspace=wkspname, **editinstrargs)
        else:
            # no usable cache: do the full reduction of this file
            self.__processFile(filename, wkspname, unfocusname_file,
                               self.prog_per_file * float(i), not useCaching)
            # write out the cachefile for the main reduced data independent of whether
            # the unfocussed workspace was requested
            if useCaching:
                SaveNexusProcessed(InputWorkspace=wkspname, Filename=cachefile)

        # accumulate runs
        if i == 0:
            # first file just becomes the accumulator workspace(s)
            if wkspname != finalname:
                RenameWorkspace(InputWorkspace=wkspname, OutputWorkspace=finalname)
            if unfocusname != '':
                RenameWorkspace(InputWorkspace=unfocusname_file, OutputWorkspace=unfocusname)
        else:
            Plus(LHSWorkspace=finalname, RHSWorkspace=wkspname,
                 OutputWorkspace=finalname,
                 ClearRHSWorkspace=self.kwargs['PreserveEvents'])
            DeleteWorkspace(Workspace=wkspname)
            if unfocusname != '':
                Plus(LHSWorkspace=unfocusname, RHSWorkspace=unfocusname_file,
                     OutputWorkspace=unfocusname,
                     ClearRHSWorkspace=self.kwargs['PreserveEvents'])
                DeleteWorkspace(Workspace=unfocusname_file)
            if self.kwargs['PreserveEvents']:
                CompressEvents(InputWorkspace=finalname, OutputWorkspace=finalname)
                # not compressing unfocussed workspace because it is in d-spacing
                # and is likely to be from a different part of the instrument

    # with more than one chunk or file the integrated proton charge is
    # generically wrong
    mtd[finalname].run().integrateProtonCharge()

    # set the output workspace
    self.setProperty('OutputWorkspace', mtd[finalname])
    if unfocusname != '':
        self.setProperty('UnfocussedWorkspace', mtd[unfocusname])