def _convert_to_angle(self, w, name):
    r"""
    Output the integrated intensity for each elastic detector versus
    detector angle with the neutron beam.

    Masked elastic detectors are assigned a zero intensity (with unit
    error so downstream fitting does not divide by zero).

    Parameters
    ----------
    w: Mantid.MatrixWorkspace2D
        Input workspace to be integrated.
    name: str
        Name of output workspace.

    Returns
    -------
    Mantid.MatrixWorkspace2D
        Single-spectrum workspace of intensity versus angle, sorted in
        ascending angle, renamed to `name`.
    """
    id_s, id_e = 16386, 17534  # start and end for elastic detector ID's
    _t_w_name = tws('convert_to_angle')
    _t_w = Integration(w, OutputWorkspace=_t_w_name)
    sp = _t_w.spectrumInfo()
    x, y, e = [], [], []  # angle, intensity, error per elastic detector
    for i in range(_t_w.getNumberHistograms()):
        id_i = _t_w.getDetector(i).getID()
        if id_s <= id_i <= id_e:
            x.append(np.degrees(sp.twoTheta(i)))
            if sp.isMasked(i):
                # masked detector: zero intensity, unit error
                y.append(0.0)
                e.append(1.0)
            else:
                y.append(_t_w.readY(i)[0])
                e.append(_t_w.readE(i)[0])
    x, y, e = np.asarray(x), np.asarray(y), np.asarray(e)
    od = np.argsort(x)  # order in ascending angles
    title = 'Angle between detector and incoming neutron beam'
    _t_w = CreateWorkspace(DataX=x[od], DataY=y[od], DataE=e[od], NSpec=1,
                           UnitX='Degrees', WorkspaceTitle=title,
                           OutputWorkspace=_t_w_name)
    RenameWorkspace(_t_w, OutputWorkspace=name)
    return _t_w
def PyExec(self):
    """
    Locate the reflected (and optionally transmission) peak spectrum
    numbers in the input workspace and store them in a TableWorkspace.

    Raises
    ------
    PeakFindingException
        If no peaks are found, or if more than two peaks are found.
    """
    from mantid.simpleapi import CropWorkspace, Integration, DeleteWorkspace

    in_ws = self.getPropertyValue("InputWorkspace")
    min_wavelength = self.getPropertyValue("StartWavelength")
    keep_workspaces = self.getPropertyValue("KeepIntermediateWorkspaces")

    # Crop off lower wavelengths where the signal is also lower.
    cropped_ws = CropWorkspace(InputWorkspace=in_ws, XMin=float(min_wavelength))
    # Integrate over the higher wavelengths after cropping.
    summed_ws = Integration(InputWorkspace=cropped_ws)

    # Fetch each histogram's single-bin intensity into one array.
    n_histograms = summed_ws.getNumberHistograms()
    y_data = np.array([summed_ws.readY(i)[0] for i in range(n_histograms)])

    # Remove the background.
    y_data = self.__remove_background(y_data)
    # Find the peaks.
    peak_index_list = self.__find_peak_spectrum_numbers(y_data, summed_ws)
    # Reverse the order so that it goes from high spec number to low spec number.
    peak_index_list.reverse()
    n_peaks_found = len(peak_index_list)

    output_ws = WorkspaceFactory.createTable("TableWorkspace")
    output_ws.addColumn("int", "Reflected Spectrum Number")

    if n_peaks_found > 2:
        raise PeakFindingException("Found more than two peaks.")
    elif n_peaks_found == 0:
        raise PeakFindingException("No peaks found")
    elif n_peaks_found == 1:
        output_ws.addRow(peak_index_list)
    elif n_peaks_found == 2:
        # Two peaks: the second is the transmission spectrum.
        output_ws.addColumn("int", "Transmission Spectrum Number")
        output_ws.addRow(peak_index_list)

    # Clean up intermediates unless the user asked to keep them.
    if int(keep_workspaces) == 0:
        DeleteWorkspace(Workspace=cropped_ws)
        DeleteWorkspace(Workspace=summed_ws)

    self.setProperty("OutputWorkspace", output_ws)
def PyExec(self):
    """
    Locate the reflected (and optionally transmission) peak spectrum
    numbers in the input workspace and store them in a TableWorkspace.

    Raises
    ------
    PeakFindingException
        If no peaks are found, or if more than two peaks are found.
    """
    from mantid.simpleapi import CropWorkspace, Integration, DeleteWorkspace

    in_ws = self.getPropertyValue("InputWorkspace")
    min_wavelength = self.getPropertyValue("StartWavelength")
    keep_workspaces = self.getPropertyValue("KeepIntermediateWorkspaces")

    # Crop off lower wavelengths where the signal is also lower.
    cropped_ws = CropWorkspace(InputWorkspace=in_ws, XMin=float(min_wavelength))
    # Integrate over the higher wavelengths after cropping.
    summed_ws = Integration(InputWorkspace=cropped_ws)

    # Fetch each histogram's single-bin intensity into one array.
    n_histograms = summed_ws.getNumberHistograms()
    y_data = np.array([summed_ws.readY(i)[0] for i in range(n_histograms)])

    # Remove the background.
    y_data = self.__remove_background(y_data)
    # Find the peaks.
    peak_index_list = self.__find_peak_spectrum_numbers(y_data, summed_ws)
    # Reverse the order so that it goes from high spec number to low spec number.
    peak_index_list.reverse()
    n_peaks_found = len(peak_index_list)

    output_ws = WorkspaceFactory.createTable("TableWorkspace")
    output_ws.addColumn("int", "Reflected Spectrum Number")

    if n_peaks_found > 2:
        raise PeakFindingException("Found more than two peaks.")
    elif n_peaks_found == 0:
        raise PeakFindingException("No peaks found")
    elif n_peaks_found == 1:
        output_ws.addRow(peak_index_list)
    elif n_peaks_found == 2:
        # Two peaks: the second is the transmission spectrum.
        output_ws.addColumn("int", "Transmission Spectrum Number")
        output_ws.addRow(peak_index_list)

    # Clean up intermediates unless the user asked to keep them.
    if int(keep_workspaces) == 0:
        DeleteWorkspace(Workspace=cropped_ws)
        DeleteWorkspace(Workspace=summed_ws)

    self.setProperty("OutputWorkspace", output_ws)
def _convert_to_angle(self, w):
    r"""
    Output the integrated intensity for each elastic detector versus
    detector angle with the neutron beam.

    Masked elastic detectors are assigned a zero intensity (with unit
    error so downstream fitting does not divide by zero).

    Parameters
    ----------
    w: Mantid.MatrixWorkspace2D
        Input workspace to be integrated.

    Returns
    -------
    Mantid.MatrixWorkspace2D
        Single-spectrum workspace of intensity versus angle, sorted in
        ascending angle.
    """
    id_s, id_e = 16386, 17534  # start and end for elastic detector ID's
    _t_w = Integration(w)
    sp = _t_w.spectrumInfo()
    x, y, e = [], [], []  # angle, intensity, error per elastic detector
    for i in range(_t_w.getNumberHistograms()):
        id_i = _t_w.getDetector(i).getID()
        if id_s <= id_i <= id_e:
            x.append(np.degrees(sp.twoTheta(i)))
            if sp.isMasked(i):
                # masked detector: zero intensity, unit error
                y.append(0.0)
                e.append(1.0)
            else:
                y.append(_t_w.readY(i)[0])
                e.append(_t_w.readE(i)[0])
    x, y, e = np.asarray(x), np.asarray(y), np.asarray(e)
    od = np.argsort(x)  # order in ascending angles
    title = 'Angle between detector and incoming neutron beam'
    _t_w = CreateWorkspace(DataX=x[od], DataY=y[od], DataE=e[od], NSpec=1,
                           UnitX='Degrees', WorkspaceTitle=title)
    return _t_w
def cc_calibrate_groups(data_ws,
                        group_ws,
                        output_basename="_tmp_group_cc_calibration",
                        previous_calibration=None,
                        Step=0.001,
                        DReference=1.2615,
                        Xmin=1.22,
                        Xmax=1.30,
                        MaxDSpaceShift=None,
                        OffsetThreshold=1E-4,
                        SkipCrossCorrelation=[],
                        PeakFunction="Gaussian",
                        SmoothNPoints=0):
    """Perform CrossCorrelate/GetDetectorOffsets on a group of detector pixels.

    It works by looping over the different groups in the group_ws, extracting
    all unmasked spectra of a group, then running CrossCorrelate and
    GetDetectorOffsets on just that group, and combining the results at the
    end. When running a group, CrossCorrelate and GetDetectorOffsets could be
    cycled until convergence of offsets is reached, given the user input
    offset threshold. If the offset threshold is specified to be equal to or
    larger than 1.0, no cycling will be carried out.

    The brightest unmasked spectrum of the group is used for the
    ReferenceSpectra in CrossCorrelate.

    :param data_ws: Input calibration raw data (in TOF), assumed to already be correctly masked
    :param group_ws: grouping workspace, e.g. output from LoadDetectorsGroupingFile
    :param output_basename: Optional name to use for temporary and output workspace
    :param previous_calibration: Optional previous diffcal workspace
    :param Step: step size for binning of data and input for GetDetectorOffsets, default 0.001
    :param DReference: Reference d-spacing for GetDetectorOffsets, default 1.2615
    :param Xmin: Xmin parameter for CrossCorrelate, default 1.22
    :param Xmax: Xmax parameter for CrossCorrelate, default 1.30
    :param MaxDSpaceShift: MaxDSpaceShift parameter for CrossCorrelate, default None
    :param OffsetThreshold: Convergence threshold for cycling cross correlation, default 1E-4
    :param SkipCrossCorrelation: Skip cross correlation for specified groups
    :param PeakFunction: Peak function to use for extracting the offset
    :param SmoothNPoints: Number of points for smoothing spectra, for cross correlation ONLY
    :return: Combined DiffCal workspace from all the different groups, and the
             list of workspace indexes that were skipped
    """
    if previous_calibration:
        ApplyDiffCal(data_ws, CalibrationWorkspace=previous_calibration)

    data_d = ConvertUnits(data_ws, Target='dSpacing', OutputWorkspace='data_d')

    group_list = np.unique(group_ws.extractY())

    _accum_cc = None
    to_skip = []
    # Initialize so the post-loop cleanup is safe even when every group is
    # skipped (original code could hit an unbound `cycling` there).
    cycling = False
    for group in group_list:
        # Figure out input parameters for CrossCorrelate and GetDetectorOffsets.
        # Each of these parameters accepts either a single value (applied to
        # every group) or a list giving a per-group setup, indexed by group
        # number (1-based).
        Xmin_group = Xmin[int(group) - 1] if isinstance(Xmin, list) else Xmin
        Xmax_group = Xmax[int(group) - 1] if isinstance(Xmax, list) else Xmax
        MDS_group = MaxDSpaceShift[int(group) - 1] if isinstance(MaxDSpaceShift, list) else MaxDSpaceShift
        DRef_group = DReference[int(group) - 1] if isinstance(DReference, list) else DReference
        OT_group = OffsetThreshold[int(group) - 1] if isinstance(OffsetThreshold, list) else OffsetThreshold
        pf_group = PeakFunction[int(group) - 1] if isinstance(PeakFunction, list) else PeakFunction
        snpts_group = SmoothNPoints[int(group) - 1] if isinstance(SmoothNPoints, list) else SmoothNPoints
        cycling = OT_group < 1.0

        indexes = np.where(group_ws.extractY().flatten() == group)[0]
        sn = np.array(group_ws.getSpectrumNumbers())[indexes]
        try:
            ws_indexes = [data_d.getIndexFromSpectrumNumber(int(i)) for i in sn]
        except RuntimeError:
            # data does not contain spectrum in group
            continue

        if group in SkipCrossCorrelation:
            to_skip.extend(ws_indexes)

        ExtractSpectra(data_d, WorkspaceIndexList=ws_indexes, OutputWorkspace='_tmp_group_cc')
        ExtractUnmaskedSpectra('_tmp_group_cc', OutputWorkspace='_tmp_group_cc')
        ExtractSpectra(data_ws, WorkspaceIndexList=ws_indexes, OutputWorkspace='_tmp_group_cc_raw')
        ExtractUnmaskedSpectra('_tmp_group_cc_raw', OutputWorkspace='_tmp_group_cc_raw')
        num_spectra = mtd['_tmp_group_cc'].getNumberHistograms()
        if num_spectra < 2:
            # Nothing to cross-correlate against.
            continue
        Rebin('_tmp_group_cc', Params=f'{Xmin_group},{Step},{Xmax_group}', OutputWorkspace='_tmp_group_cc')
        if snpts_group >= 3:
            SmoothData('_tmp_group_cc', NPoints=snpts_group, OutputWorkspace='_tmp_group_cc')

        # Figure out brightest spectrum to be used as the reference for cross correlation.
        CloneWorkspace('_tmp_group_cc_raw', OutputWorkspace='_tmp_group_cc_raw_tmp')
        intg = Integration('_tmp_group_cc_raw_tmp',
                           StartWorkspaceIndex=0,
                           EndWorkspaceIndex=num_spectra - 1,
                           OutputWorkspace='_tmp_group_intg')
        brightest_spec_index = int(np.argmax(np.array([intg.readY(i)[0] for i in range(num_spectra)])))

        # Cycling cross correlation. At each step, we use the obtained offsets
        # and DIFC's from the previous step to obtain new DIFC's. In this way,
        # spectra in the group come closer and closer to each other as the
        # cycle goes. This continues until the converging criterion is reached:
        # the median of all the non-zero offsets must be smaller than the
        # threshold (user tuned parameter, default 1E-4, i.e. 0.04% relative
        # offset).
        num_cycle = 1
        while True:
            CrossCorrelate('_tmp_group_cc',
                           Xmin=Xmin_group, XMax=Xmax_group,
                           MaxDSpaceShift=MDS_group,
                           ReferenceSpectra=brightest_spec_index,
                           WorkspaceIndexMin=0,
                           WorkspaceIndexMax=num_spectra - 1,
                           OutputWorkspace='_tmp_group_cc')

            bin_range = (Xmax_group - Xmin_group) / Step
            GetDetectorOffsets(InputWorkspace='_tmp_group_cc',
                               Step=Step,
                               Xmin=-bin_range, XMax=bin_range,
                               DReference=DRef_group,
                               MaxOffset=1,
                               PeakFunction=pf_group,
                               OutputWorkspace='_tmp_group_cc')

            if group not in SkipCrossCorrelation:
                # Collect the magnitudes of all non-zero offsets; their median
                # is the convergence measure. The offsets workspace holds one
                # value per spectrum, hence readY(item)[0].
                offsets_tmp = []
                for item in ws_indexes:
                    offset = abs(mtd['_tmp_group_cc'].readY(item)[0])
                    if offset != 0:
                        offsets_tmp.append(offset)
                offsets_tmp = np.array(offsets_tmp)
                logger.notice(f'Running group-{group}, cycle-{num_cycle}.')
                logger.notice(f'Median offset (no sign) = {np.median(offsets_tmp)}')
                converged = np.median(offsets_tmp) < OT_group
            else:
                # Skipped group: force all offsets to zero and stop cycling.
                for item in ws_indexes:
                    mtd['_tmp_group_cc'].dataY(item)[0] = 0.0
                logger.notice(f'Cross correlation skipped for group-{group}.')
                converged = True

            if not cycling or converged:
                if cycling and converged:
                    if group not in SkipCrossCorrelation:
                        logger.notice(f'Cross correlation for group-{group} converged, ')
                        logger.notice(f'with offset threshold {OT_group}.')
                break
            else:
                # Not converged: fold the current offsets into a diffcal,
                # re-apply it to the raw TOF data and rebin for the next cycle.
                previous_calibration = ConvertDiffCal('_tmp_group_cc',
                                                      PreviousCalibration=previous_calibration,
                                                      OutputWorkspace='_tmp_group_cc_diffcal')
                ApplyDiffCal('_tmp_group_cc_raw', CalibrationWorkspace='_tmp_group_cc_diffcal')
                ConvertUnits('_tmp_group_cc_raw', Target='dSpacing', OutputWorkspace='_tmp_group_cc')
                Rebin('_tmp_group_cc', Params=f'{Xmin_group},{Step},{Xmax_group}', OutputWorkspace='_tmp_group_cc')
                num_cycle += 1

        # Accumulate this group's offsets into the running total.
        if _accum_cc is None:
            _accum_cc = RenameWorkspace('_tmp_group_cc')
        else:
            _accum_cc += mtd['_tmp_group_cc']
        # DeleteWorkspace('_tmp_group_cc')

    previous_calibration = ConvertDiffCal('_accum_cc',
                                          PreviousCalibration=previous_calibration,
                                          OutputWorkspace=f'{output_basename}_cc_diffcal')

    DeleteWorkspace('_accum_cc')
    DeleteWorkspace('_tmp_group_cc')
    DeleteWorkspace('_tmp_group_cc_raw')
    if cycling and '_tmp_group_cc_diffcal' in mtd:
        DeleteWorkspace('_tmp_group_cc_diffcal')

    return mtd[f'{output_basename}_cc_diffcal'], to_skip