def _scaleAfterMonitorNormalization(ws, wsNames, wsCleanup, algorithmLogging):
    """Scale ws by a factor given in the instrument parameters."""
    SCALING_PARAM = 'scaling_after_monitor_normalisation'
    NON_RECURSIVE = False  # Prevent recursive calls.
    instr = ws.getInstrument()
    if not instr.hasParameter(SCALING_PARAM, NON_RECURSIVE):
        return ws
    factor = instr.getNumberParameter(SCALING_PARAM, NON_RECURSIVE)[0]
    scaledWSName = wsNames.withSuffix('scaled_by_monitor_factor')
    scaledWS = Scale(InputWorkspace=ws,
                     OutputWorkspace=scaledWSName,
                     Factor=factor,
                     EnableLogging=algorithmLogging)
    wsCleanup.cleanup(ws)
    return scaledWS
def _subtractEC(ws, ecWS, ecScaling, wsNames, wsCleanup, algorithmLogging):
    """Subtract empty container."""
    # out = in - ecScaling * EC
    scaledECWSName = wsNames.withSuffix('scaled_EC')
    scaledECWS = Scale(InputWorkspace=ecWS,
                       Factor=ecScaling,
                       OutputWorkspace=scaledECWSName,
                       EnableLogging=algorithmLogging)
    ecSubtractedWSName = wsNames.withSuffix('EC_subtracted')
    ecSubtractedWS = Minus(LHSWorkspace=ws,
                           RHSWorkspace=scaledECWS,
                           OutputWorkspace=ecSubtractedWSName,
                           EnableLogging=algorithmLogging)
    wsCleanup.cleanup(scaledECWS)
    return ecSubtractedWS
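For orientation, a minimal standalone sketch of the same out = in - ecScaling * EC arithmetic using plain simpleapi calls, without the wsNames/wsCleanup helpers; the CreateSampleWorkspace inputs and the 0.9 scaling factor are assumptions for illustration, not values taken from the routine above.

from mantid.simpleapi import CreateSampleWorkspace, Minus, Scale

sample = CreateSampleWorkspace()       # stand-in for the measured sample data
container = CreateSampleWorkspace()    # stand-in for the empty-container run
# Scale the container by an assumed factor, then subtract it from the sample.
scaled_ec = Scale(InputWorkspace=container, Factor=0.9)
corrected = Minus(LHSWorkspace=sample, RHSWorkspace=scaled_ec)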
def test_SimpleAlgorithm_Accepts_Group_Handle(self):
    from mantid.simpleapi import Scale
    self.create_matrix_workspace_in_ADS("First")
    self.create_matrix_workspace_in_ADS("Second")
    run_algorithm('GroupWorkspaces', InputWorkspaces='First,Second',
                  OutputWorkspace='grouped')
    group = mtd['grouped']
    try:
        w = Scale(group, 1.5)
        mtd.remove(str(w))
    except Exception as exc:
        self.fail("Algorithm raised an exception with input as WorkspaceGroup: '"
                  + str(exc) + "'")
    mtd.remove(str(group))
def _absoluteUnits(ws, vanaWS, wsNames, wsCleanup, report, algorithmLogging):
    """Scales ws by an absolute units factor."""
    sampleMaterial = ws.sample().getMaterial()
    sampleNumberDensity = sampleMaterial.numberDensity
    vanaMaterial = vanaWS.sample().getMaterial()
    vanaNumberDensity = vanaMaterial.numberDensity
    vanaCrossSection = vanaMaterial.totalScatterXSection()
    factor = vanaNumberDensity / sampleNumberDensity * vanaCrossSection
    if factor <= 0 or math.isnan(factor) or math.isinf(factor):
        raise RuntimeError('Invalid absolute units normalisation factor: {}'.format(factor))
    report.notice('Absolute units scaling factor: {}'.format(factor))
    scaledWSName = wsNames.withSuffix('absolute_units')
    scaledWS = Scale(InputWorkspace=ws,
                     OutputWorkspace=scaledWSName,
                     Factor=factor,
                     EnableLogging=algorithmLogging)
    wsCleanup.cleanup(ws)
    return scaledWS
def setUp(self):
    """ Create sample workspaces. """
    # Create some test data
    sample = CreateSampleWorkspace(NumBanks=1,
                                   BankPixelWidth=1,
                                   XUnit='Wavelength',
                                   XMin=6.8,
                                   XMax=7.9,
                                   BinWidth=0.1)
    self._sample_ws = sample

    can = Scale(InputWorkspace=sample, Factor=1.2)
    self._can_ws = can

    self._corrections_ws_name = 'corrections'
def _subtractFlatBkg(ws, wsType, bkgWorkspace, bkgScaling, wsNames, wsCleanup, algorithmLogging):
    """Subtract a scaled flat background from a workspace."""
    if wsType == common.WS_CONTENT_DETS:
        subtractedWSName = wsNames.withSuffix('flat_bkg_subtracted_detectors')
        scaledBkgWSName = wsNames.withSuffix('flat_bkg_for_detectors_scaled')
    else:
        subtractedWSName = wsNames.withSuffix('flat_bkg_subtracted_monitors')
        scaledBkgWSName = wsNames.withSuffix('flat_bkg_for_monitors_scaled')
    Scale(InputWorkspace=bkgWorkspace,
          OutputWorkspace=scaledBkgWSName,
          Factor=bkgScaling,
          EnableLogging=algorithmLogging)
    subtractedWS = Minus(LHSWorkspace=ws,
                         RHSWorkspace=scaledBkgWSName,
                         OutputWorkspace=subtractedWSName,
                         EnableLogging=algorithmLogging)
    wsCleanup.cleanup(scaledBkgWSName)
    return subtractedWS
def _normaliseToSlits(self, ws):
    """Normalise ws to slit opening."""
    if self.getProperty(Prop.SLIT_NORM).value == SlitNorm.OFF:
        return ws
    r = ws.run()
    slit2width = r.get('VirtualSlitAxis.s2w_actual_width')
    slit3width = r.get('VirtualSlitAxis.s3w_actual_width')
    if slit2width is None or slit3width is None:
        self.log().warning('Slit information not found in sample logs. '
                           'Slit normalisation disabled.')
        return ws
    f = slit2width.value * slit3width.value
    normalisedWSName = self._names.withSuffix('normalised_to_slits')
    normalisedWS = Scale(InputWorkspace=ws,
                         OutputWorkspace=normalisedWSName,
                         Factor=1.0 / f,
                         EnableLogging=self._subalgLogging)
    self._cleanup.cleanup(ws)
    return normalisedWS
def test_unmirror_0_1_2_3(self):
    args = {'Run': '136553.nxs',
            'UnmirrorOption': 0,
            'OutputWorkspace': 'zero'}
    IndirectILLReductionQENS(**args)

    args['UnmirrorOption'] = 1
    args['OutputWorkspace'] = 'both'
    IndirectILLReductionQENS(**args)

    args['UnmirrorOption'] = 2
    args['OutputWorkspace'] = 'left'
    IndirectILLReductionQENS(**args)

    args['UnmirrorOption'] = 3
    args['OutputWorkspace'] = 'right'
    IndirectILLReductionQENS(**args)

    summed = Plus(mtd['left_red'].getItem(0), mtd['right_red'].getItem(0))
    Scale(InputWorkspace=summed, Factor=0.5, OutputWorkspace=summed)

    result = CompareWorkspaces(summed, mtd['both_red'].getItem(0))
    self.assertTrue(result[0], "Unmirror 1 should be the sum of 2 and 3")

    left_right = GroupWorkspaces([mtd['left_red'].getItem(0).getName(),
                                  mtd['right_red'].getItem(0).getName()])
    result = CompareWorkspaces(left_right, 'zero_red')
    self.assertTrue(result[0], "Unmirror 0 should be the group of 2 and 3")
def ResetNegatives2D(self, wsName, addMin, resetValue):
    # Check if workspace has negative values and correct them if necessary
    xData = mtd[wsName].extractX()
    yData = mtd[wsName].extractY()
    eData = mtd[wsName].extractE()
    if addMin:
        intMin = np.min(yData)
        # Check if minimal intensity is negative. If it is, add -1*intMin to all intensities
        if intMin < 0:
            Scale(InputWorkspace=mtd[wsName],
                  OutputWorkspace=mtd[wsName],
                  Factor=-intMin,
                  Operation="Add")
    else:
        yDataNew = np.where(yData < 0, resetValue, yData)
        CreateWorkspace(OutputWorkspace=mtd[wsName],
                        DataX=xData,
                        DataY=yDataNew,
                        DataE=eData,
                        NSpec=mtd[wsName].getNumberHistograms(),
                        ParentWorkspace=mtd[wsName])
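The two correction modes above can be illustrated with NumPy alone, independent of the workspace machinery; a hedged sketch with a made-up intensity array:

import numpy as np

y = np.array([1.0, -0.5, 2.0, -0.1])  # made-up intensities

# addMin mode: add -min(y) so the most negative bin is raised to zero.
shifted = y + max(0.0, -float(np.min(y)))

# reset mode: replace only the negative bins with a fixed reset value.
reset_value = 0.0
clipped = np.where(y < 0, reset_value, y)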
def _normalizeToTime(ws, wsNames, wsCleanup, algorithmLogging):
    """Normalize to the 'duration' sample log, falling back to 'actual_time'."""
    log = ws.run()
    if not log.hasProperty('duration'):
        if not log.hasProperty('actual_time'):
            raise RuntimeError("Cannot normalise to acquisition time: neither 'duration' "
                               "nor 'actual_time' found in sample logs.")
        time = log.getProperty('actual_time').value
    else:
        time = log.getProperty('duration').value
    if time == 0:
        raise RuntimeError("Cannot normalise to acquisition time: time is zero.")
    if time < 0:
        raise RuntimeError("Cannot normalise to acquisition time: time is negative.")
    normalizedWSName = wsNames.withSuffix('normalized_to_time')
    normalizedWS = Scale(InputWorkspace=ws,
                         Factor=1. / time,
                         OutputWorkspace=normalizedWSName,
                         EnableLogging=algorithmLogging)
    return normalizedWS
def PyExec(self):
    self.getInputs()
    xData = self.data.extractX()
    yData = self.data.extractY()
    eData = self.data.extractE()
    if self._addMin:
        intMin = np.min(yData)
        # Check if minimal intensity is negative. If it is, add -1*intMin to all intensities
        if intMin < 0:
            Scale(InputWorkspace=self.data,
                  OutputWorkspace=self.data,
                  Factor=-intMin,
                  Operation="Add")
    else:
        yDataNew = np.where(yData < 0, self._resetValue, yData)
        CreateWorkspace(OutputWorkspace=self.data,
                        DataX=xData,
                        DataY=yDataNew,
                        DataE=eData,
                        NSpec=self.data.getNumberHistograms(),
                        ParentWorkspace=self.data)
def scale_monitor(workspace_name):
    """
    Scale monitor intensity by a factor given as the Workflow.Monitor1-ScalingFactor parameter.

    @param workspace_name Name of workspace to process monitor for
    """
    from mantid.simpleapi import Scale

    monitor_workspace_name = workspace_name + '_mon'
    instrument = mtd[workspace_name].getInstrument()

    try:
        scale_factor = instrument.getNumberParameter('Workflow.Monitor1-ScalingFactor')[0]
    except IndexError:
        logger.information('No monitor scaling factor found for workspace %s' % workspace_name)
        return

    if scale_factor != 1.0:
        Scale(InputWorkspace=monitor_workspace_name,
              OutputWorkspace=monitor_workspace_name,
              Factor=1.0 / scale_factor,
              Operation='Multiply')
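A minimal usage sketch for this helper, assuming the reduced workspace and its companion monitor workspace (same name plus the '_mon' suffix) are already in the ADS; the run name below is hypothetical.

# 'irs26176_graphite002_red' and 'irs26176_graphite002_red_mon' are assumed
# to exist in the ADS; the names are illustrative only.
scale_monitor('irs26176_graphite002_red')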
def setUp(self):
    """ Create sample workspaces. """
    # Create some test data
    sample = CreateSampleWorkspace(NumBanks=1,
                                   BankPixelWidth=1,
                                   XUnit='Wavelength',
                                   XMin=6.8,
                                   XMax=7.9,
                                   BinWidth=0.1)
    self._sample_ws = sample

    # Create empty test data not in wavelength
    sample_empty_unit = CreateSampleWorkspace(NumBanks=1,
                                              BankPixelWidth=1,
                                              XUnit='Empty',
                                              XMin=6.8,
                                              XMax=7.9,
                                              BinWidth=0.1)
    SetInstrumentParameter(Workspace=sample_empty_unit,
                           ParameterName='Efixed',
                           ParameterType='Number',
                           Value='5.')
    self._sample_empty_unit = sample_empty_unit
    empty_unit_point = ConvertToPointData(sample_empty_unit)
    self._empty_unit_point = empty_unit_point

    can = Scale(InputWorkspace=sample, Factor=1.2)
    self._can_ws = can

    self._corrections_ws_name = 'corrections'
def test_scale(self):
    """
    Test if scaling is correct.
    @return:
    """
    wrk_ref = Abins(AbInitioProgram=self._ab_initio_program,
                    VibrationalOrPhononFile=self._squaricn + ".phonon",
                    TemperatureInKelvin=self._temperature,
                    SampleForm=self._sample_form,
                    Instrument=self._instrument_name,
                    Atoms=self._atoms,
                    Scale=self._scale,
                    SumContributions=self._sum_contributions,
                    QuantumOrderEventsNumber=self._quantum_order_events_number,
                    ScaleByCrossSection=self._cross_section_factor,
                    OutputWorkspace=self._squaricn + "_ref")

    wrk = Abins(AbInitioProgram=self._ab_initio_program,
                VibrationalOrPhononFile=self._squaricn + ".phonon",
                TemperatureInKelvin=self._temperature,
                SampleForm=self._sample_form,
                Instrument=self._instrument_name,
                Atoms=self._atoms,
                SumContributions=self._sum_contributions,
                QuantumOrderEventsNumber=self._quantum_order_events_number,
                Scale=10,
                ScaleByCrossSection=self._cross_section_factor,
                OutputWorkspace="squaricn_scale")

    ref = Scale(wrk_ref, Factor=10)

    (result, messages) = CompareWorkspaces(wrk, ref, Tolerance=self._tolerance)
    self.assertEqual(result, True)
def _flux_normalization(self, w, target):
    """
    Divide data by integrated flux intensity

    Parameters
    ----------
    w: Mantid.EventsWorkspace
        Input workspace
    target: str
        Specify the entity the workspace refers to. Valid options are
        'sample', 'background', and 'vanadium'

    Returns
    -------
    Mantid.EventWorkspace
    """
    valid_targets = ('sample', 'background', 'vanadium')
    if target not in valid_targets:
        raise KeyError('Target must be one of ' + ', '.join(valid_targets))
    w_nor = None
    if self._flux_normalization_type == 'Monitor':
        _t_flux = None
        _t_flux_name = tws('monitor_aggregate')
        target_to_runs = dict(sample='RunNumbers',
                              background='BackgroundRuns',
                              vanadium='VanadiumRuns')
        rl = self._run_list(self.getProperty(target_to_runs[target]).value)

        _t_w_name = tws('monitor')
        for run in rl:
            run_name = '{0}_{1}'.format(self._short_inst, str(run))
            _t_w = LoadNexusMonitors(run_name, OutputWorkspace=_t_w_name)
            if _t_flux is None:
                _t_flux = CloneWorkspace(_t_w, OutputWorkspace=_t_flux_name)
            else:
                _t_flux = Plus(_t_flux, _t_w, OutputWorkspace=_t_flux_name)

        _t_flux = ConvertUnits(_t_flux, Target='Wavelength', Emode='Elastic',
                               OutputWorkspace=_t_flux_name)
        _t_flux = CropWorkspace(_t_flux,
                                XMin=self._wavelength_band[0],
                                XMax=self._wavelength_band[1],
                                OutputWorkspace=_t_flux_name)
        _t_flux = OneMinusExponentialCor(_t_flux,
                                         C='0.20749999999999999',
                                         C1='0.001276',
                                         OutputWorkspace=_t_flux_name)
        _t_flux = Scale(_t_flux, Factor='1e-06', Operation='Multiply',
                        OutputWorkspace=_t_flux_name)
        _t_flux = Integration(_t_flux,
                              RangeLower=self._wavelength_band[0],
                              RangeUpper=self._wavelength_band[1],
                              OutputWorkspace=_t_flux_name)
        w_nor = Divide(w, _t_flux, OutputWorkspace=w.name())
    else:
        aggregate_flux = None
        if self._flux_normalization_type == 'Proton Charge':
            aggregate_flux = w.getRun().getProtonCharge()
        elif self._flux_normalization_type == 'Duration':
            aggregate_flux = w.getRun().getProperty('duration').value
        w_nor = Scale(w, Operation='Multiply', Factor=1.0 / aggregate_flux,
                      OutputWorkspace=w.name())
    return w_nor
def PyExec(self):
    data = self.getProperty("InputWorkspace").value
    cal = self.getProperty("CalibrationWorkspace").value
    bkg = self.getProperty("BackgroundWorkspace").value
    mask = self.getProperty("MaskWorkspace").value
    target = self.getProperty("Target").value
    eFixed = self.getProperty("EFixed").value
    xMin = self.getProperty("XMin").value
    xMax = self.getProperty("XMax").value
    numberBins = self.getProperty("NumberBins").value
    normaliseBy = self.getProperty("NormaliseBy").value
    maskAngle = self.getProperty("MaskAngle").value
    outWS = self.getPropertyValue("OutputWorkspace")

    data_scale = 1
    cal_scale = 1
    bkg_scale = 1

    if normaliseBy == "Monitor":
        data_scale = data.run().getProtonCharge()
    elif normaliseBy == "Time":
        data_scale = data.run().getLogData('duration').value

    ExtractMask(data, OutputWorkspace='__mask_tmp', EnableLogging=False)

    if maskAngle != Property.EMPTY_DBL:
        MaskAngle(Workspace='__mask_tmp',
                  MinAngle=maskAngle,
                  Angle='Phi',
                  EnableLogging=False)

    if mask is not None:
        BinaryOperateMasks(InputWorkspace1='__mask_tmp',
                           InputWorkspace2=mask,
                           OperationType='OR',
                           OutputWorkspace='__mask_tmp',
                           EnableLogging=False)

    ExtractUnmaskedSpectra(InputWorkspace=data,
                           MaskWorkspace='__mask_tmp',
                           OutputWorkspace='__data_tmp',
                           EnableLogging=False)
    if isinstance(mtd['__data_tmp'], IEventWorkspace):
        Integration(InputWorkspace='__data_tmp',
                    OutputWorkspace='__data_tmp',
                    EnableLogging=False)
    ConvertSpectrumAxis(InputWorkspace='__data_tmp',
                        Target=target,
                        EFixed=eFixed,
                        OutputWorkspace=outWS,
                        EnableLogging=False)
    Transpose(InputWorkspace=outWS, OutputWorkspace=outWS, EnableLogging=False)
    ResampleX(InputWorkspace=outWS,
              OutputWorkspace=outWS,
              XMin=xMin,
              XMax=xMax,
              NumberBins=numberBins,
              EnableLogging=False)

    if cal is not None:
        ExtractUnmaskedSpectra(InputWorkspace=cal,
                               MaskWorkspace='__mask_tmp',
                               OutputWorkspace='__cal_tmp',
                               EnableLogging=False)
        if isinstance(mtd['__cal_tmp'], IEventWorkspace):
            Integration(InputWorkspace='__cal_tmp',
                        OutputWorkspace='__cal_tmp',
                        EnableLogging=False)
        CopyInstrumentParameters(data, '__cal_tmp', EnableLogging=False)
        ConvertSpectrumAxis(InputWorkspace='__cal_tmp',
                            Target=target,
                            EFixed=eFixed,
                            OutputWorkspace='__cal_tmp',
                            EnableLogging=False)
        Transpose(InputWorkspace='__cal_tmp',
                  OutputWorkspace='__cal_tmp',
                  EnableLogging=False)
        ResampleX(InputWorkspace='__cal_tmp',
                  OutputWorkspace='__cal_tmp',
                  XMin=xMin,
                  XMax=xMax,
                  NumberBins=numberBins,
                  EnableLogging=False)
        Divide(LHSWorkspace=outWS,
               RHSWorkspace='__cal_tmp',
               OutputWorkspace=outWS,
               EnableLogging=False)
        if normaliseBy == "Monitor":
            cal_scale = cal.run().getProtonCharge()
        elif normaliseBy == "Time":
            cal_scale = cal.run().getLogData('duration').value

    Scale(InputWorkspace=outWS,
          OutputWorkspace=outWS,
          Factor=cal_scale / data_scale,
          EnableLogging=False)

    if bkg is not None:
        ExtractUnmaskedSpectra(InputWorkspace=bkg,
                               MaskWorkspace='__mask_tmp',
                               OutputWorkspace='__bkg_tmp',
                               EnableLogging=False)
        if isinstance(mtd['__bkg_tmp'], IEventWorkspace):
            Integration(InputWorkspace='__bkg_tmp',
                        OutputWorkspace='__bkg_tmp',
                        EnableLogging=False)
        CopyInstrumentParameters(data, '__bkg_tmp', EnableLogging=False)
        ConvertSpectrumAxis(InputWorkspace='__bkg_tmp',
                            Target=target,
                            EFixed=eFixed,
                            OutputWorkspace='__bkg_tmp',
                            EnableLogging=False)
        Transpose(InputWorkspace='__bkg_tmp',
                  OutputWorkspace='__bkg_tmp',
                  EnableLogging=False)
        ResampleX(InputWorkspace='__bkg_tmp',
                  OutputWorkspace='__bkg_tmp',
                  XMin=xMin,
                  XMax=xMax,
                  NumberBins=numberBins,
                  EnableLogging=False)
        if cal is not None:
            Divide(LHSWorkspace='__bkg_tmp',
                   RHSWorkspace='__cal_tmp',
                   OutputWorkspace='__bkg_tmp',
                   EnableLogging=False)
        if normaliseBy == "Monitor":
            bkg_scale = bkg.run().getProtonCharge()
        elif normaliseBy == "Time":
            bkg_scale = bkg.run().getLogData('duration').value
        Scale(InputWorkspace='__bkg_tmp',
              OutputWorkspace='__bkg_tmp',
              Factor=cal_scale / bkg_scale,
              EnableLogging=False)
        Scale(InputWorkspace='__bkg_tmp',
              OutputWorkspace='__bkg_tmp',
              Factor=self.getProperty('BackgroundScale').value,
              EnableLogging=False)
        Minus(LHSWorkspace=outWS,
              RHSWorkspace='__bkg_tmp',
              OutputWorkspace=outWS,
              EnableLogging=False)

    self.setProperty("OutputWorkspace", outWS)

    # remove temp workspaces
    [DeleteWorkspace(ws, EnableLogging=False)
     for ws in self.temp_workspace_list if mtd.doesExist(ws)]
def test_scale(self):
    ws = DensityOfStates(File=self._file_name, Scale=10)
    ref = DensityOfStates(File=self._file_name)
    ref = Scale(ref, Factor=10)

    self.assertEqual(CheckWorkspacesMatch(ws, ref), 'Success!')
def scale_ws(ws):
    ws = Scale(ws, 100, "Multiply")
    return ws
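For comparison, a hedged sketch of calling Scale with the same positional arguments on a throwaway workspace; CreateSampleWorkspace is used here purely to have something to scale, and the variable names are illustrative.

from mantid.simpleapi import CreateSampleWorkspace, Scale

raw = CreateSampleWorkspace()          # illustrative input workspace
# Positional form: Factor=100, Operation="Multiply"; Scale returns a new,
# scaled workspace rather than modifying 'raw' in place.
scaled = Scale(raw, 100, "Multiply")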
                OutputWorkspace='incSMK{0}'.format(K))

# What happens if one of the I(Q,t) files contains a different number of Q values?
# In the following special case, K=0.11, the I(Q,t) file contains spectra for values of
# Q (0.02, 0.03, 0.04, 0.05, ..., 2.0). Thus, we have to rebin the spectra in the Q-coordinate.
if K == '0.11':
    Transpose(InputWorkspace='incSMK0.11_fqt.Re',
              OutputWorkspace='incSMK0.11_fqt.Re')  # from I(Q,t) to I(t,Q)
    Rebin(InputWorkspace='incSMK0.11_fqt.Re',
          Params=[0.2, 0.2, 2.0],
          OutputWorkspace='incSMK0.11_fqt.Re')  # Rebin in Q to (0.3, 0.5, ..., 1.9)
    Transpose(InputWorkspace='incSMK0.11_fqt.Re',
              OutputWorkspace='incSMK0.11_fqt.Re')  # from I(t,Q) back to I(Q,t)
    # After the rebin, the intensities I(Q, t=0) in the resulting spectra differ from those
    # of the spectra at other K values. We rescale the intensities to match.
    Scale(InputWorkspace='incSMK0.11_fqt.Re',
          Factor=0.5,
          Operation='Multiply',
          OutputWorkspace='incSMK0.11_fqt.Re')

# Fourier transform I(Q,t) to S(Q,E). Resulting workspaces are incSMK0.03_sqw, ..., incSMK0.15_sqw
SassenaFFT(InputWorkspace='incSMK{0}'.format(K),
           FFTonlyRealpart=1,
           DetailedBalance=1,
           Temp=200)

# Rebin S(Q,E) in E to the region [-0.2, 0.2] meV, of the same order as the
# experimental [-0.15, 0.15] but a bit broader
Rebin(InputWorkspace='incSMK{0}_sqw'.format(K),
      Params=[-0.2, 0.0004, 0.2],
      OutputWorkspace='incSMK{0}_sqw'.format(K))

# Important remark: the simulated system is one POSS molecule. Thus, our simulated system does not
# take into account the broadening due to motions of the molecular centre of mass that take place
# in the crystalline phase. As a result, the elastic line in the simulation is over-represented.
# To correct this shortcoming in the simulations we will remove the simulated elastic line from the
# simulated S(Q,E) and later include a term representing the elastic line in the fitting model.
ws = mtd['incSMK{0}_sqw'.format(K)]  # simulated S(Q,E)
def PyExec(self):
    data = self._expand_groups()
    bkg = self.getProperty("BackgroundWorkspace").valueAsStr  # same background for all
    cal = self.getProperty("CalibrationWorkspace").value  # same calibration for all
    numberBins = self.getProperty("NumberBins").value
    outWS = self.getPropertyValue("OutputWorkspace")
    summing = self.getProperty("Sum").value  # [Yes or No]

    # convert all of the input workspaces into spectra of "target" units (generally angle)
    data, masks = self._convert_data(data)

    # determine x-range
    xMin, xMax = self._locate_global_xlimit(data)

    # BEGIN_FOR: process_spectra
    for n, (_wsn, _mskn) in enumerate(zip(data, masks)):
        # resample spectra
        ResampleX(
            InputWorkspace=_wsn,
            OutputWorkspace=_wsn,
            XMin=xMin,
            XMax=xMax,
            NumberBins=numberBins,
            EnableLogging=False,
        )

        # calibration
        if cal is not None:
            _ws_cal_resampled = self._resample_calibration(_wsn, _mskn, xMin, xMax)
            Divide(
                LHSWorkspace=_wsn,
                RHSWorkspace=_ws_cal_resampled,
                OutputWorkspace=_wsn,
                EnableLogging=False,
            )
        else:
            _ws_cal_resampled = None

        Scale(
            InputWorkspace=_wsn,
            OutputWorkspace=_wsn,
            Factor=self._get_scale(cal) / self._get_scale(_wsn),
            EnableLogging=False,
        )

        # background
        if bkg:
            _ws_bkg_resampled = self._resample_background(
                bkg, _wsn, _mskn, xMin, xMax, _ws_cal_resampled)
            Minus(
                LHSWorkspace=_wsn,
                RHSWorkspace=_ws_bkg_resampled,
                OutputWorkspace=_wsn,
                EnableLogging=False,
            )

        if summing:
            # conjoin
            if n < 1:
                RenameWorkspace(
                    InputWorkspace=_wsn,
                    OutputWorkspace="__ws_conjoined",
                    EnableLogging=False,
                )
            else:
                # this adds to `InputWorkspace1`
                ConjoinWorkspaces(
                    InputWorkspace1="__ws_conjoined",
                    InputWorkspace2=_wsn,
                    CheckOverlapping=False,
                    EnableLogging=False,
                )
    # END_FOR: process_spectra

    # Step_3: sum all spectra
    # ref: https://docs.mantidproject.org/nightly/algorithms/SumSpectra-v1.html
    if summing:
        if cal is not None:
            outWS = SumSpectra(
                InputWorkspace="__ws_conjoined",
                OutputWorkspace=outWS,
                WeightedSum=True,
                MultiplyBySpectra=not bool(cal),
                EnableLogging=False,
            )
        else:
            outWS = SumSpectra(
                InputWorkspace="__ws_conjoined",
                OutputWorkspace=outWS,
                WeightedSum=True,
                MultiplyBySpectra=True,
                EnableLogging=False,
            )
    else:
        if len(data) == 1:
            outWS = RenameWorkspace(InputWorkspace=data[0], OutputWorkspace=outWS)
        else:
            outWS = GroupWorkspaces(InputWorkspaces=data, OutputWorkspace=outWS)

    self.setProperty("OutputWorkspace", outWS)

    # Step_4: remove temp workspaces
    [DeleteWorkspace(ws, EnableLogging=False)
     for ws in self.temp_workspace_list if mtd.doesExist(ws)]
def PyExec(self): data = self.getProperty("InputWorkspace").value # [1~n] bkg = self.getProperty("BackgroundWorkspace").value # [1~n] cal = self.getProperty("CalibrationWorkspace").value # [1] xMin = self.getProperty("XMin").value xMax = self.getProperty("XMax").value numberBins = self.getProperty("NumberBins").value outWS = self.getPropertyValue("OutputWorkspace") # NOTE: # StringArrayProperty cannot be optional, so the background can only be passed in as a string # or a list, which will be manually unpacked here if bkg != "": bkg = [ AnalysisDataService.retrieve(me) for me in map(str.strip, bkg.split(",")) ] # NOTE: # xMin and xMax are initialized as empty numpy.array (np.array([])). _xMin, _xMax = self._locate_global_xlimit() xMin = _xMin if xMin.size == 0 else xMin xMax = _xMax if xMax.size == 0 else xMax # BEGIN_FOR: prcess_spectra for n, _wsn in enumerate(data): _mskn = f"__mask_{n}" # calculated in previous loop _ws = AnalysisDataService.retrieve(_wsn) # resample spectra _ws_resampled = ResampleX( InputWorkspace=f"__ws_{n}", XMin=xMin, XMax=xMax, NumberBins=numberBins, EnableLogging=False, ) # calibration if cal is not None: _ws_cal_resampled = self._resample_calibration(_ws, _mskn, xMin, xMax) _ws_resampled = Divide( LHSWorkspace=_ws_resampled, RHSWorkspace=_ws_cal_resampled, EnableLogging=False, ) else: _ws_cal_resampled = None _ws_resampled = Scale( InputWorkspace=_ws_resampled, Factor=self._get_scale(cal) / self._get_scale(_ws), EnableLogging=False, ) # background if bkg != "": bgn = bkg[n] if isinstance(bkg, list) else bkg _ws_bkg_resampled = self._resample_background( bgn, _ws, _mskn, xMin, xMax, _ws_cal_resampled ) _ws_resampled = Minus( LHSWorkspace=_ws_resampled, RHSWorkspace=_ws_bkg_resampled, EnableLogging=False, ) # conjoin if n < 1: CloneWorkspace( InputWorkspace=_ws_resampled, OutputWorkspace="__ws_conjoined", EnableLogging=False, ) else: ConjoinWorkspaces( InputWorkspace1="__ws_conjoined", InputWorkspace2=_ws_resampled, CheckOverlapping=False, EnableLogging=False, ) # END_FOR: prcess_spectra # Step_3: sum all spectra # ref: https://docs.mantidproject.org/nightly/algorithms/SumSpectra-v1.html if cal is not None: SumSpectra( InputWorkspace="__ws_conjoined", OutputWorkspace=outWS, WeightedSum=True, MultiplyBySpectra=False, EnableLogging=False, ) else: SumSpectra( InputWorkspace="__ws_conjoined", OutputWorkspace=outWS, WeightedSum=True, MultiplyBySpectra=True, EnableLogging=False, ) self.setProperty("OutputWorkspace", outWS) # Step_4: remove temp workspaces [ DeleteWorkspace(ws, EnableLogging=False) for ws in self.temp_workspace_list if mtd.doesExist(ws) ]
def _resample_background(
    self,
    current_background,
    current_workspace,
    mask_name,
    x_min,
    x_max,
    resampled_calibration,
):
    """Perform resample on given background"""
    cal = self.getProperty("CalibrationWorkspace").value
    target = self.getProperty("Target").value
    e_fixed = self.getProperty("EFixed").value
    number_bins = self.getProperty("NumberBins").value

    _ws_bkg = ExtractUnmaskedSpectra(
        InputWorkspace=current_background,
        MaskWorkspace=mask_name,
        EnableLogging=False,
    )

    if isinstance(mtd["_ws_bkg"], IEventWorkspace):
        _ws_bkg = Integration(InputWorkspace=_ws_bkg, EnableLogging=False)

    CopyInstrumentParameters(
        InputWorkspace=current_workspace,
        OutputWorkspace=_ws_bkg,
        EnableLogging=False,
    )

    _ws_bkg = ConvertSpectrumAxis(
        InputWorkspace=_ws_bkg,
        Target=target,
        EFixed=e_fixed,
        EnableLogging=False,
    )

    _ws_bkg = Transpose(InputWorkspace=_ws_bkg, EnableLogging=False)

    _ws_bkg_resampled = ResampleX(
        InputWorkspace=_ws_bkg,
        XMin=x_min,
        XMax=x_max,
        NumberBins=number_bins,
        EnableLogging=False,
    )

    if cal is not None:
        _ws_bkg_resampled = Divide(
            LHSWorkspace=_ws_bkg_resampled,
            RHSWorkspace=resampled_calibration,
            EnableLogging=False,
        )

    _ws_bkg_resampled = Scale(
        InputWorkspace=_ws_bkg_resampled,
        Factor=self._get_scale(cal) / self._get_scale(current_background),
        EnableLogging=False,
    )

    _ws_bkg_resampled = Scale(
        InputWorkspace=_ws_bkg_resampled,
        Factor=self.getProperty("BackgroundScale").value,
        EnableLogging=False,
    )

    return _ws_bkg_resampled
def PyExec(self):
    # 0) Create reporter to report progress
    steps = 9
    begin = 0
    end = 1.0
    prog_reporter = Progress(self, begin, end, steps)

    # 1) get input parameters from a user
    self._get_properties()
    prog_reporter.report("Input data from the user has been collected.")

    # 2) read ab initio data
    ab_initio_data = abins.AbinsData.from_calculation_data(
        self._vibrational_or_phonon_data_file, self._ab_initio_program)
    prog_reporter.report("Vibrational/phonon data has been read.")

    # 3) calculate S
    s_calculator = abins.SCalculatorFactory.init(
        filename=self._vibrational_or_phonon_data_file,
        temperature=self._temperature,
        sample_form=self._sample_form,
        abins_data=ab_initio_data,
        instrument=self._instrument,
        quantum_order_num=self._num_quantum_order_events,
        bin_width=self._bin_width)
    s_data = s_calculator.get_formatted_data()
    prog_reporter.report("Dynamical structure factors have been determined.")

    # 4) get atoms for which S should be plotted
    self._extracted_ab_initio_data = ab_initio_data.get_atoms_data().extract()
    num_atoms = len(self._extracted_ab_initio_data)
    all_atms_smbls = list(set([self._extracted_ab_initio_data["atom_%s" % atom]["symbol"]
                               for atom in range(num_atoms)]))
    all_atms_smbls.sort()

    if len(self._atoms) == 0:  # case: all atoms
        atom_symbols = all_atms_smbls
        atom_numbers = []
    else:  # case selected atoms
        # Specific atoms are identified with prefix and integer index, e.g 'atom_5'.
        # Other items are element symbols.
        # A regular expression match is used to make the underscore separator optional
        # and check the index format.
        prefix = abins.constants.ATOM_PREFIX
        atom_symbols = [item for item in self._atoms if item[:len(prefix)] != prefix]
        if len(atom_symbols) != len(set(atom_symbols)):  # only different types
            raise ValueError("User atom selection (by symbol) contains repeated species. "
                             "This is not permitted as Abins cannot create multiple workspaces "
                             "with the same name.")

        numbered_atom_test = re.compile('^' + prefix + r'_?(\d+)$')
        # Matches will be lists of str
        atom_numbers = [numbered_atom_test.findall(item) for item in self._atoms]
        # Remove empty matches, cast the rest to int
        atom_numbers = [int(match[0]) for match in atom_numbers if match]
        if len(atom_numbers) != len(set(atom_numbers)):
            raise ValueError("User atom selection (by number) contains repeated atom. "
                             "This is not permitted as Abins cannot create multiple workspaces "
                             "with the same name.")

        for atom_symbol in atom_symbols:
            if atom_symbol not in all_atms_smbls:
                raise ValueError("User defined atom selection (by element) '%s': "
                                 "not present in the system." % atom_symbol)

        for atom_number in atom_numbers:
            if atom_number < 1 or atom_number > num_atoms:
                raise ValueError("Invalid user atom selection (by number) '%s%s': "
                                 "out of range (%s - %s)" % (prefix, atom_number, 1, num_atoms))

        # Final sanity check that everything in "atoms" field was understood
        if len(atom_symbols) + len(atom_numbers) < len(self._atoms):
            elements_report = " Symbols: " + ", ".join(atom_symbols) if len(atom_symbols) else ""
            numbers_report = (" Numbers: " + ", ".join(str(number) for number in atom_numbers)
                              if len(atom_numbers) else "")
            raise ValueError("Not all user atom selections ('atoms' option) were understood."
                             + elements_report + numbers_report)

    prog_reporter.report("Atoms, for which dynamical structure factors should be plotted, "
                         "have been determined.")

    # at the moment only types of atom, e.g, for benzene three options -> 1) C, H; 2) C; 3) H
    # 5) create workspaces for atoms in interest
    workspaces = []
    if self._sample_form == "Powder":
        workspaces.extend(self._create_partial_s_per_type_workspaces(atoms_symbols=atom_symbols,
                                                                     s_data=s_data))
        workspaces.extend(self._create_partial_s_per_type_workspaces(atom_numbers=atom_numbers,
                                                                     s_data=s_data))
    prog_reporter.report("Workspaces with partial dynamical structure factors have been constructed.")

    # 6) Create a workspace with sum of all atoms if required
    if self._sum_contributions:
        total_atom_workspaces = []
        for ws in workspaces:
            if "total" in ws:
                total_atom_workspaces.append(ws)
        total_workspace = self._create_total_workspace(partial_workspaces=total_atom_workspaces)
        workspaces.insert(0, total_workspace)
        prog_reporter.report("Workspace with total S has been constructed.")

    # 7) add experimental data if available to the collection of workspaces
    if self._experimental_file != "":
        workspaces.insert(0, self._create_experimental_data_workspace().name())
        prog_reporter.report("Workspace with the experimental data has been constructed.")

    GroupWorkspaces(InputWorkspaces=workspaces, OutputWorkspace=self._out_ws_name)

    # 8) save workspaces to ascii_file
    num_workspaces = mtd[self._out_ws_name].getNumberOfEntries()
    for wrk_num in range(num_workspaces):
        wrk = mtd[self._out_ws_name].getItem(wrk_num)
        SaveAscii(InputWorkspace=Scale(wrk, 1.0 / self._bin_width, "Multiply"),
                  Filename=wrk.name() + ".dat",
                  Separator="Space",
                  WriteSpectrumID=False)
    prog_reporter.report("All workspaces have been saved to ASCII files.")

    # 9) set OutputWorkspace
    self.setProperty('OutputWorkspace', self._out_ws_name)
    prog_reporter.report("Group workspace with all required dynamical structure factors "
                         "has been constructed.")
def test_scale(self):
    wks = SimulatedDensityOfStates(PHONONFile=self._phonon_file, Scale=10)
    ref = SimulatedDensityOfStates(PHONONFile=self._phonon_file)
    ref = Scale(ref, Factor=10)

    self.assertEqual(CheckWorkspacesMatch(wks, ref), 'Success!')
def test_scale(self):
    wks = SimulatedDensityOfStates(PHONONFile=self._phonon_file, Scale=10)
    ref = SimulatedDensityOfStates(PHONONFile=self._phonon_file)
    ref = Scale(ref, Factor=10)

    self.assertTrue(CompareWorkspaces(wks, ref)[0])
def PyExec(self):
    # 0) Create reporter to report progress
    steps = 9
    begin = 0
    end = 1.0
    prog_reporter = Progress(self, begin, end, steps)

    # 1) get input parameters from a user
    self._get_properties()
    prog_reporter.report("Input data from the user has been collected.")

    # 2) read ab initio data
    ab_initio_loaders = {"CASTEP": AbinsModules.LoadCASTEP,
                         "CRYSTAL": AbinsModules.LoadCRYSTAL,
                         "DMOL3": AbinsModules.LoadDMOL3,
                         "GAUSSIAN": AbinsModules.LoadGAUSSIAN}
    rdr = ab_initio_loaders[self._ab_initio_program](
        input_ab_initio_filename=self._vibrational_or_phonon_data_file)
    ab_initio_data = rdr.get_formatted_data()
    prog_reporter.report("Vibrational/phonon data has been read.")

    # 3) calculate S
    s_calculator = AbinsModules.CalculateS.init(filename=self._vibrational_or_phonon_data_file,
                                                temperature=self._temperature,
                                                sample_form=self._sample_form,
                                                abins_data=ab_initio_data,
                                                instrument=self._instrument,
                                                quantum_order_num=self._num_quantum_order_events,
                                                bin_width=self._bin_width)
    s_data = s_calculator.get_formatted_data()
    prog_reporter.report("Dynamical structure factors have been determined.")

    # 4) get atoms for which S should be plotted
    self._extracted_ab_initio_data = ab_initio_data.get_atoms_data().extract()
    num_atoms = len(self._extracted_ab_initio_data)
    all_atms_smbls = list(set([self._extracted_ab_initio_data["atom_%s" % atom]["symbol"]
                               for atom in range(num_atoms)]))
    all_atms_smbls.sort()

    if len(self._atoms) == 0:  # case: all atoms
        atoms_symbol = all_atms_smbls
    else:  # case selected atoms
        if len(self._atoms) != len(set(self._atoms)):  # only different types
            raise ValueError("Not all user defined atoms are unique.")
        for atom_symbol in self._atoms:
            if atom_symbol not in all_atms_smbls:
                raise ValueError("User defined atom not present in the system.")
        atoms_symbol = self._atoms
    prog_reporter.report("Atoms, for which dynamical structure factors should be plotted, "
                         "have been determined.")

    # at the moment only types of atom, e.g, for benzene three options -> 1) C, H; 2) C; 3) H
    # 5) create workspaces for atoms in interest
    workspaces = []
    if self._sample_form == "Powder":
        workspaces.extend(self._create_partial_s_per_type_workspaces(atoms_symbols=atoms_symbol,
                                                                     s_data=s_data))
    prog_reporter.report("Workspaces with partial dynamical structure factors have been constructed.")

    # 6) Create a workspace with sum of all atoms if required
    if self._sum_contributions:
        total_atom_workspaces = []
        for ws in workspaces:
            if "total" in ws:
                total_atom_workspaces.append(ws)
        total_workspace = self._create_total_workspace(partial_workspaces=total_atom_workspaces)
        workspaces.insert(0, total_workspace)
        prog_reporter.report("Workspace with total S has been constructed.")

    # 7) add experimental data if available to the collection of workspaces
    if self._experimental_file != "":
        workspaces.insert(0, self._create_experimental_data_workspace().name())
        prog_reporter.report("Workspace with the experimental data has been constructed.")

    GroupWorkspaces(InputWorkspaces=workspaces, OutputWorkspace=self._out_ws_name)

    # 8) save workspaces to ascii_file
    num_workspaces = mtd[self._out_ws_name].getNumberOfEntries()
    for wrk_num in range(num_workspaces):
        wrk = mtd[self._out_ws_name].getItem(wrk_num)
        SaveAscii(InputWorkspace=Scale(wrk, 1.0 / self._bin_width, "Multiply"),
                  Filename=wrk.name() + ".dat",
                  Separator="Space",
                  WriteSpectrumID=False)
    prog_reporter.report("All workspaces have been saved to ASCII files.")

    # 9) set OutputWorkspace
    self.setProperty('OutputWorkspace', self._out_ws_name)
    prog_reporter.report("Group workspace with all required dynamical structure factors "
                         "has been constructed.")
    SaveNexus(ws, nxs_van_file)
    return ws

if powder:
    from mantid.simpleapi import LoadWAND, WANDPowderReduction, SavePlot1D, SaveFocusedXYE, Scale
    data = LoadWAND(filename, Grouping='4x4')
    runNumber = data.getRunNumber()
    cal = get_vanadium(runNumber)
    WANDPowderReduction(InputWorkspace=data,
                        CalibrationWorkspace=cal,
                        Target='Theta',
                        NumberBins=1200,
                        OutputWorkspace='reduced')
    Scale(InputWorkspace='reduced', OutputWorkspace='reduced', Factor=100)
    SaveFocusedXYE('reduced',
                   Filename=os.path.join(outdir, output_file + '.xye'),
                   SplitFiles=False,
                   IncludeHeader=False)
    div = SavePlot1D('reduced', OutputType='plotly')
    request = publish_plot('HB2C', runNumber, files={'file': div})
else:
    # Single Crystal
    with h5py.File(filename, 'r') as f:
        offset = decode(f['/entry/DASlogs/HB2C:Mot:s2.RBV/average_value'].value[0])
        title = decode(f['/entry/title'].value[0])
        mon = decode(f['/entry/monitor1/total_counts'].value[0])
        duration = decode(f['/entry/duration'].value[0])
        run_number = decode(f['/entry/run_number'].value[0])
        bc = np.zeros((512 * 480 * 8))
        for b in range(8):
            bc += np.bincount(f['/entry/bank' + str(b + 1) + '_events/event_id'].value,