def _wave_range(self):
    """Build the wavelength grid used for the corrections and determine the
    elastic wavelength for the current energy mode.

    Populates ``self._waves`` (evenly spaced wavelengths spanning the sample's
    first spectrum) and ``self._elastic``.
    """
    wave_range = '__wave_range'
    ExtractSingleSpectrum(InputWorkspace=self._sample_ws_name, OutputWorkspace=wave_range, WorkspaceIndex=0)

    # read the X axis once instead of three separate readX(0) calls
    Xin = mtd[wave_range].readX(0)
    wave_min = Xin[0]
    wave_max = Xin[-1]
    number_waves = self._number_wavelengths
    wave_bin = (wave_max - wave_min) / (number_waves - 1)

    self._waves = [wave_min + idx * wave_bin for idx in range(number_waves)]
    DeleteWorkspace(wave_range)

    if self._emode == 'Elastic':
        # use the wavelength at the middle of the grid
        self._elastic = self._waves[int(len(self._waves) / 2)]
    elif self._emode in ('Direct', 'Indirect'):
        # both modes used the identical formula; merged the duplicated branches
        self._elastic = math.sqrt(81.787 / self._efixed)  # elastic wavelength

    logger.information('Elastic lambda : %f' % (self._elastic))
    logger.information('Lambda : %i values from %f to %f' % (
        len(self._waves), self._waves[0], self._waves[-1]))
def _correct_sample(self, sample_workspace, a_ss_workspace):
    """Correct for sample only (when no container is given)."""
    logger.information('Correcting sample')
    # convert the Ass factor to wavelength, then divide it out of the sample
    correction = self._convert_units_wavelength(a_ss_workspace)
    return sample_workspace / correction
def _correct_sample_can(self):
    """
    Correct for sample and container.
    """
    logger.information("Correcting sample and container")
    corrected_can_ws = "__corrected_can"

    # Acc: divide the scaled container by its self-attenuation factor
    Divide(
        LHSWorkspace=self._scaled_container,
        RHSWorkspace=self._corrections + "_acc",
        OutputWorkspace=corrected_can_ws,
    )

    # Acsc: apply the container-attenuation-in-sample factor
    Multiply(
        LHSWorkspace=corrected_can_ws, RHSWorkspace=self._corrections + "_acsc", OutputWorkspace=corrected_can_ws
    )
    # subtract the fully corrected container from the sample
    Minus(LHSWorkspace=self._sample_ws_name, RHSWorkspace=corrected_can_ws, OutputWorkspace=self._output_ws_name)

    # Assc: normalise the result by the sample self-attenuation factor
    Divide(
        LHSWorkspace=self._output_ws_name,
        RHSWorkspace=self._corrections + "_assc",
        OutputWorkspace=self._output_ws_name,
    )

    DeleteWorkspace(corrected_can_ws)
def _setup(self):
    """Read all algorithm properties into instance attributes and extract the
    Q array and S(Q) values from the input workspace."""
    self._sample_ws_name = self.getPropertyValue('SofqWorkspace')
    logger.information('SofQ : ' + self._sample_ws_name)
    self._sample_chemical_formula = self.getPropertyValue('SampleChemicalFormula')
    self._sample_density_type = self.getProperty('SampleDensityType').value
    self._sample_density = self.getProperty('SampleDensity').value

    # Monte-Carlo neutron counts for single and multiple scattering
    self._nrun1 = self.getProperty('NeutronsSingle').value
    self._nrun2 = self.getProperty('NeutronsMultiple').value
    self._geom = self.getPropertyValue('Geometry')
    logger.information('Geometry : ' + self._geom)

    # number of scatterings is clamped to the range [1, 5]
    self._numb_scat = self.getProperty('NumberScatterings').value
    if self._numb_scat < 1:
        raise ValueError('Number of scatterings %i is less than 1' % (self._numb_scat))
    if self._numb_scat > 5:
        self._numb_scat = 5
        logger.information('Number of scatterings set to 5 (max)')
    else:
        logger.information('Number of scatterings : %i ' % (self._numb_scat))

    self._thickness = self.getProperty('Thickness').value
    self._width = self.getProperty('Width').value
    self._height = self.getProperty('Height').value
    self._wave = self.getProperty('Wavelength').value
    self._number_angles = self.getProperty('NumberAngles').value

    self._q_values = mtd[self._sample_ws_name].readX(0)  # q array
    self._delta_q = self._q_values[1] - self._q_values[0]  # assumes uniform Q binning — TODO confirm
    self._sofq = mtd[self._sample_ws_name].readY(0)  # S(q) values
    self._number_q = len(self._q_values)
    logger.information('Number of S(Q) values : %i ' % (self._number_q))

    self._plot = self.getProperty('Plot').value
    self._save = self.getProperty('Save').value
def populate_interfaces_menu(self):
    """Discover the registered Python interfaces and add them to the
    interfaces menu, grouped by category and sorted alphabetically."""
    interface_dir = ConfigService['mantidqt.python_interfaces_directory']
    items = ConfigService['mantidqt.python_interfaces'].split()

    # list of custom interfaces that are not qt4/qt5 compatible
    GUI_BLACKLIST = ['ISIS_Reflectometry_Old.py',
                     'ISIS_SANS_v2_experimental.py',
                     'Frequency_Domain_Analysis.py',
                     'Elemental_Analysis.py']

    # detect the python interfaces
    interfaces = {}
    for item in items:
        key, scriptname = item.split('/')
        if not os.path.exists(os.path.join(interface_dir, scriptname)):
            logger.warning('Failed to find script "{}" in "{}"'.format(scriptname, interface_dir))
            continue
        if scriptname in GUI_BLACKLIST:
            logger.information('Not adding gui "{}"'.format(scriptname))
            continue
        interfaces.setdefault(key, []).append(scriptname)

    # add the interfaces to the menu
    for key in sorted(interfaces.keys()):
        submenu = self.interfaces_menu.addMenu(key)
        for name in sorted(interfaces[key]):
            action = submenu.addAction(name.replace('.py', '').replace('_', ' '))
            script = os.path.join(interface_dir, name)
            # bind 'script' as a default argument so each action keeps its own path
            action.triggered.connect(lambda checked, script=script: self.launch_custom_gui(script))
def _update_instrument_angles(self, workspace, q_values, wave):
    """
    Updates the instrument angles in the specified workspace, using the
    specified wavelength and the specified Q-Values. This is required when
    calculating absorption corrections for indirect elastic.

    :param workspace: The workspace whose instrument angles to update.
    :param q_values:  The extracted Q-Values (MomentumTransfer)
    :param wave:      The wavelength
    """
    work_dir = config['defaultsave.directory']
    k0 = 4.0 * math.pi / wave
    theta = 2.0 * np.degrees(np.arcsin(q_values / k0))  # convert Q to scattering angle

    filename = 'Elastic_angles.txt'
    path = os.path.join(work_dir, filename)
    logger.information('Creating angles file : ' + path)

    head = 'spectrum,theta'
    # with-statement guarantees the file is closed even if a write fails
    with open(path, 'w') as handle:
        handle.write(head + " \n")
        for n, angle in enumerate(theta):
            handle.write(str(n + 1) + ' ' + str(angle) + "\n")

    update_alg = self.createChildAlgorithm("UpdateInstrumentFromFile", enableLogging=False)
    update_alg.setProperty("Workspace", workspace)
    update_alg.setProperty("Filename", path)
    update_alg.setProperty("MoveMonitors", False)
    update_alg.setProperty("IgnorePhi", True)
    update_alg.setProperty("AsciiHeader", head)
    update_alg.setProperty("SkipFirstNLines", 1)
    # The original configured the child algorithm but never ran it, so the
    # angle update was a no-op; execute it like the other child algorithms.
    update_alg.execute()
def onProcessStarted(self):
    """ Triggered when the sample processing starts. """
    message = "Starting of sample {0} processing".format(self._index + 1)
    logger.information(message)
    self._status = self.STATUS_PENDING
    self.statusChanged.emit()
def _save_ws(self, input_ws, text):
    """Save a workspace as a NeXus file in the default save directory.

    :param input_ws: name of the workspace to save
    :param text:     label used in the confirmation log message
    """
    workdir = config['defaultsave.directory']
    path = os.path.join(workdir, input_ws + '.nxs')
    save_alg = self.createChildAlgorithm("SaveNexusProcessed", enableLogging=True)
    save_alg.setProperty("InputWorkspace", input_ws)
    save_alg.setProperty("Filename", path)
    save_alg.execute()
    logger.information('%s file saved as %s' % (text, path))
def _correct_sample(self, sample_workspace, a_ss_workspace):
    """ Correct for sample only (when no container is given). """
    logger.information('Correcting sample')
    a_ss_in_lambda = self._convert_units_wavelength(a_ss_workspace)
    corrected = sample_workspace / a_ss_in_lambda
    return corrected
def _transmission(self):
    """Log the sample (and, if present, container) transmission computed from
    annular thickness, number density and scattering/absorption cross sections."""
    sample_path = self._radii[1] - self._radii[0]
    sample_trans = math.exp(-sample_path * self._density[0] * (self._sig_s[0] + self._sig_a[0]))
    logger.information("Sample transmission : %f" % (sample_trans))
    if self._use_can:
        can_path = self._radii[2] - self._radii[1]
        can_trans = math.exp(-can_path * self._density[1] * (self._sig_s[1] + self._sig_a[1]))
        logger.information("Can transmission : %f" % (can_trans))
def _transmission(self):
    """Compute and log the transmission of the sample, and of the container
    when one is in use (Beer-Lambert over the annular path length)."""
    path_length = self._radii[1] - self._radii[0]
    total_xsect = self._sig_s[0] + self._sig_a[0]
    trans = math.exp(-path_length * self._density[0] * total_xsect)
    logger.information('Sample transmission : %f' % trans)
    if self._use_can:
        path_length = self._radii[2] - self._radii[1]
        total_xsect = self._sig_s[1] + self._sig_a[1]
        trans = math.exp(-path_length * self._density[1] * total_xsect)
        logger.information('Can transmission : %f' % trans)
def _save_output(self, workspaces):
    """Write every workspace in *workspaces* to the default save directory
    as a NeXus file."""
    workdir = config['defaultsave.directory']
    for workspace_name in workspaces:
        nexus_path = os.path.join(workdir, workspace_name + '.nxs')
        logger.information('Creating file : %s' % nexus_path)
        save_alg = self.createChildAlgorithm("SaveNexusProcessed", enableLogging=False)
        save_alg.setProperty("InputWorkspace", workspace_name)
        save_alg.setProperty("Filename", nexus_path)
        save_alg.execute()
def _correct_sample(self, sample_workspace, a_ss_workspace):
    """ Correct for sample only (when no container is given). """
    logger.information('Correcting sample')
    # divide the sample by the Ass factor converted to wavelength units
    return Divide(LHSWorkspace=sample_workspace,
                  RHSWorkspace=self._convert_units_wavelength(a_ss_workspace))
def _validate_crystal_input_file(self, filename_full_path=None):
    """
    Method to validate input file for CRYSTAL ab initio program.
    :param filename_full_path: full path of a file to check.
    :returns: True if file is valid otherwise false.
    """
    logger.information("Validate CRYSTAL file with vibrational or phonon data.")
    # CRYSTAL output is expected to carry a .out extension
    result = self._validate_ab_initio_file_extension(filename_full_path=filename_full_path,
                                                     expected_file_extension=".out")
    return result
def _validate_gaussian_input_file(self, filename_full_path=None):
    """
    Method to validate input file for GAUSSIAN ab initio program.
    :param filename_full_path: full path of a file to check.
    :returns: True if file is valid otherwise false.
    """
    logger.information("Validate GAUSSIAN file with vibration data.")
    # GAUSSIAN output is expected to carry a .log extension
    outcome = self._validate_ab_initio_file_extension(filename_full_path=filename_full_path,
                                                      expected_file_extension=".log")
    return outcome
def _validate_dmol3_input_file(self, filename_full_path=None):
    """
    Method to validate input file for DMOL3 ab initio program.
    :param filename_full_path: full path of a file to check.
    :returns: True if file is valid otherwise false.
    """
    logger.information("Validate DMOL3 file with vibrational data.")
    # DMOL3 output is expected to carry a .outmol extension
    validation = self._validate_ab_initio_file_extension(filename_full_path=filename_full_path,
                                                         expected_file_extension=".outmol")
    return validation
def _find_das_version(self):
    """Pick the data-acquisition-system version from the first run number."""
    boundary_run = 90000  # from VDAS.v1900_2018 to VDAS.v2019_2100
    runs = self.getProperty('RunNumbers').value
    first_run = int(self._run_list(runs)[0])
    # runs before the boundary use the old DAS, the rest the new one
    self._das_version = VDAS.v1900_2018 if first_run < boundary_run else VDAS.v2019_2100
    logger.information('DAS version is ' + str(self._das_version))
def load_and_rebin(runs: List[int],
                   output_workspace: str,
                   rebin_params: List[float],
                   banks: Optional[List[int]] = None) -> Workspace2D:
    r"""
    @brief Load a list of run numbers and rebin

    This function assumes the runs are large and events cannot be all loaded into memory. Hence, a run is loaded
    at a time, rebinned to TOF counts, events are dropped, and counts are added to the cumulative histogram
    resulting from loading the previous runs.

    @param runs : list of run numbers
    @param rebin_params : a triad of first, step, and last. A negative step indicates logarithmic binning
    @param output_workspace : the name of the output `MatrixWorkspace`
    @param banks : list of bank numbers, if one wants to load only certain banks.
    @return handle to the output workspace
    """
    instrument = 'CORELLI'
    # restrict the load to the requested banks, if any were given
    kwargs = {} if banks is None else {'BankName': ','.join([f'bank{b}' for b in banks])}

    # Load the first run
    logger.information(f'Loading run {runs[0]}. {len(runs)} runs remaining to be loaded')
    LoadEventNexus(Filename=f'{instrument}_{runs[0]}', OutputWorkspace=output_workspace, LoadLogs=False, **kwargs)
    if rebin_params is not None:
        # PreserveEvents=False drops the events, keeping only the histogram counts
        Rebin(InputWorkspace=output_workspace, OutputWorkspace=output_workspace,
              Params=rebin_params, PreserveEvents=False)
    # Iteratively load the remaining run, adding to the final workspace each time
    try:
        single_run = '__single_run_' + output_workspace
        for i, run in enumerate(runs[1:]):
            logger.information(f'Loading run {run}. {len(runs) - 1 - i} runs remaining to be loaded')
            LoadEventNexus(Filename=f'{instrument}_{run}', OutputWorkspace=single_run, LoadLogs=False, **kwargs)
            if rebin_params is not None:
                Rebin(InputWorkspace=single_run, OutputWorkspace=single_run,
                      Params=rebin_params, PreserveEvents=False)
            Plus(LHSWorkspace=output_workspace, RHSWorkspace=single_run, OutputWorkspace=output_workspace)
            DeleteWorkspace(single_run)  # save memory as quick as possible
    except RuntimeError:
        # NOTE(review): a failed load leaves the partial sum in output_workspace
        # and returns it without signalling — presumably best-effort; confirm intent
        DeleteWorkspace(single_run)  # a bit of clean-up
    return mtd[output_workspace]
def _calculate_parameters(self):
    """
    Calculates the TransformToIqt parameters and saves in a table workspace.
    """
    CropWorkspace(InputWorkspace=self._sample,
                  OutputWorkspace='__TransformToIqt_sample_cropped',
                  Xmin=self._e_min,
                  Xmax=self._e_max)
    x_data = mtd['__TransformToIqt_sample_cropped'].readX(0)
    number_input_points = len(x_data) - 1
    num_bins = int(number_input_points / self._number_points_per_bin)
    self._e_width = (abs(self._e_min) + abs(self._e_max)) / num_bins

    # Look up the resolution from the instrument parameter file, preferring
    # the analyser component; fall back to a default value if not found.
    try:
        instrument = mtd[self._sample].getInstrument()
        analyserName = instrument.getStringParameter('analyser')[0]
        analyser = instrument.getComponentByName(analyserName)

        if analyser is not None:
            logger.debug('Found %s component in instrument %s, will look for resolution there'
                         % (analyserName, instrument))
            resolution = analyser.getNumberParameter('resolution')[0]
        else:
            logger.debug('No %s component found on instrument %s, will look for resolution in top level instrument'
                         % (analyserName, instrument))
            resolution = instrument.getNumberParameter('resolution')[0]

        logger.information('Got resolution from IPF: %f' % resolution)

    except (AttributeError, IndexError):
        resolution = 0.0175
        logger.warning('Could not get resolution from IPF, using default value: %f' % (resolution))

    resolution_bins = int(round((2 * resolution) / self._e_width))

    if resolution_bins < 5:
        # The warning text was split by a raw newline in the original source,
        # which is a syntax error; reconstructed as a single string literal.
        logger.warning('Resolution curve has <5 points. Results may be unreliable.')

    param_table = CreateEmptyTableWorkspace(OutputWorkspace=self._parameter_table)

    param_table.addColumn('int', 'SampleInputBins')
    param_table.addColumn('float', 'BinReductionFactor')
    param_table.addColumn('int', 'SampleOutputBins')
    param_table.addColumn('float', 'EnergyMin')
    param_table.addColumn('float', 'EnergyMax')
    param_table.addColumn('float', 'EnergyWidth')
    param_table.addColumn('float', 'Resolution')
    param_table.addColumn('int', 'ResolutionBins')

    param_table.addRow([number_input_points, self._number_points_per_bin, num_bins,
                        self._e_min, self._e_max, self._e_width,
                        resolution, resolution_bins])

    DeleteWorkspace('__TransformToIqt_sample_cropped')

    self.setProperty('ParameterWorkspace', param_table)
def _subtract(self):
    """ Do a simple container subtraction (when no corrections are given). """
    logger.information("Using simple container subtraction")
    # sample - scaled container, written straight to the output workspace
    Minus(LHSWorkspace=self._sample_ws_name,
          RHSWorkspace=self._scaled_container,
          OutputWorkspace=self._output_ws_name)
def _calc_angles(self):
    """Build the evenly spaced grid of scattering angles (degrees) that spans
    the two-theta range implied by the Q values at the given wavelength."""
    q_max = 4.0 * math.pi / self._wave
    theta_deg = 2.0 * np.rad2deg(np.arcsin(self._q_values / q_max))
    step = (theta_deg[-1] - theta_deg[0]) / self._number_angles
    self._angles = np.zeros(self._number_angles)
    for i in range(self._number_angles):
        self._angles[i] = theta_deg[0] + i * step
    logger.information('Number of angles : %i ; from %f to %f ' %
                       (self._number_angles, self._angles[0], self._angles[self._number_angles - 1]))
def _correct_sample(self):
    """ Correct for sample only (when no container is given). """
    logger.information('Correcting sample')
    # Ass: divide the sample (in wavelength) by its self-shielding factor
    s_api.Divide(LHSWorkspace=self._sample_ws_wavelength,
                 RHSWorkspace=self._corrections + '_ass',
                 OutputWorkspace=self._output_ws_name)
def onProcessSuccess(self):
    """ Triggered when the sample process succeed. """
    # fixed typo in the user-facing log message: 'sucess' -> 'success'
    logger.information(
        "Processing of sample {0} finished with success".format(
            self._index + 1))
    if self._exporter is not None:
        self._exporter.run(self)
    self._status = self.STATUS_PROCESSED
    self.statusChanged.emit()
def _get_angles(self):
    """Collect the two-theta angle (in degrees) of every detector in the
    sample workspace into ``self._angles``."""
    workspace = mtd[self._sample_ws_name]
    num_hist = workspace.getNumberHistograms()
    instrument = workspace.getInstrument()
    source_pos = instrument.getSource().getPos()
    sample_pos = instrument.getSample().getPos()
    beam_pos = sample_pos - source_pos
    self._angles = []
    for index in range(num_hist):
        detector = workspace.getDetector(index)
        two_theta = detector.getTwoTheta(sample_pos, beam_pos) * 180.0 / math.pi
        self._angles.append(two_theta)
    logger.information('Detector angles : %i from %f to %f ' % (
        len(self._angles), self._angles[0], self._angles[-1]))
def _read_header(self, asc):
    """Scan the first 30 lines of *asc* for '#'-prefixed header lines.

    :param asc: sequence of text lines read from the data file.
    :returns: tuple ``(lines, head)`` — the index of the last header line
              seen and the list of collected header lines.
    """
    head = []
    lines = 0
    for m in range(30):  # NOTE(review): assumes asc has >= 30 entries, else IndexError — confirm inputs
        char = asc[m]
        if char.startswith('#'):  #check if line begins with a #
            head.append(asc[m])  #list of lines
            lines = m  #number of lines
    logger.information('Data Header : ')
    # NOTE(review): the last two collected header lines are never logged
    # (range stops at lines - 2) — possibly an off-by-one; confirm intent
    for m in range(0, lines - 2):
        logger.information(head[m])
    return lines, head
def _setup(self):
    """Read all sample/container algorithm properties into instance
    attributes and resolve Efixed when a fixed-energy mode is requested."""
    # --- sample properties ---
    self._sample_ws_name = self.getPropertyValue('SampleWorkspace')
    self._sample_chemical_formula = self.getPropertyValue('SampleChemicalFormula')
    self._sample_coherent_cross_section = self.getPropertyValue('SampleCoherentXSection')
    self._sample_incoherent_cross_section = self.getPropertyValue('SampleIncoherentXSection')
    self._sample_attenuation_cross_section = self.getPropertyValue('SampleAttenuationXSection')
    self._sample_density_type = self.getPropertyValue('SampleDensityType')
    self._sample_number_density_unit = self.getPropertyValue('SampleNumberDensityUnit')
    self._sample_density = self.getProperty('SampleDensity').value
    self._sample_thickness = self.getProperty('SampleThickness').value
    self._sample_angle = self.getProperty('SampleAngle').value

    # --- container properties (container is optional) ---
    self._can_ws_name = self.getPropertyValue('CanWorkspace')
    self._use_can = self._can_ws_name != ''

    self._can_chemical_formula = self.getPropertyValue('CanChemicalFormula')
    self._can_coherent_cross_section = self.getPropertyValue('CanCoherentXSection')
    self._can_incoherent_cross_section = self.getPropertyValue('CanIncoherentXSection')
    self._can_attenuation_cross_section = self.getPropertyValue('CanAttenuationXSection')
    self._can_density_type = self.getPropertyValue('CanDensityType')
    self._can_number_density_unit = self.getPropertyValue('CanNumberDensityUnit')
    self._can_density = self.getProperty('CanDensity').value
    self._can_front_thickness = self.getProperty('CanFrontThickness').value
    self._can_back_thickness = self.getProperty('CanBackThickness').value

    self._number_wavelengths = self.getProperty('NumberWavelengths').value
    self._interpolate = self.getProperty('Interpolate').value

    self._emode = self.getPropertyValue('Emode')
    self._efixed = self.getProperty('Efixed').value

    if (self._emode == 'Efixed' or self._emode == 'Direct' or self._emode == 'Indirect') and self._efixed == 0.:
        # Efixed mode requested with default efixed, try to read from Instrument Parameters
        try:
            self._efixed = self._getEfixed()
            logger.information('Found Efixed = {0}'.format(self._efixed))
        except ValueError:
            raise RuntimeError('Efixed, Direct or Indirect mode requested with the default value,'
                               'but could not find the Efixed parameter in the instrument.')

    if self._emode == 'Efixed':
        logger.information('No interpolation is possible in Efixed mode.')
        self._interpolate = False

    # choose how the material is defined: explicit formula wins over cross sections
    self._set_sample_method = 'Chemical Formula' if self._sample_chemical_formula != '' else 'Cross Sections'
    self._set_can_method = 'Chemical Formula' if self._can_chemical_formula != '' else 'Cross Sections'

    self._output_ws_name = self.getPropertyValue('OutputWorkspace')

    # purge the lists
    self._angles = list()
    self._wavelengths = list()
def _get_angles(self):
    """Collect every detector's two-theta angle (degrees), reporting
    progress once per detector."""
    sample_ws = mtd[self._sample_ws_name]
    num_hist = sample_ws.getNumberHistograms()
    angle_prog = Progress(self, start=0.03, end=0.07, nreports=num_hist)
    source_pos = sample_ws.getInstrument().getSource().getPos()
    sample_pos = sample_ws.getInstrument().getSample().getPos()
    beam_pos = sample_pos - source_pos
    angles = []
    for index in range(num_hist):
        angle_prog.report('Obtaining data for detector angle %i' % index)
        det = sample_ws.getDetector(index)
        angles.append(det.getTwoTheta(sample_pos, beam_pos) * 180.0 / math.pi)
    self._angles = angles
    logger.information('Detector angles : %i from %f to %f ' % (len(self._angles),
                                                                self._angles[0],
                                                                self._angles[-1]))
def _correct_sample(self):
    """ Correct for sample only (when no container is given). """
    logger.information("Correcting sample")
    # Ass: sample divided by its self-shielding factor
    Divide(LHSWorkspace=self._sample_ws_name,
           RHSWorkspace=self._corrections + "_ass",
           OutputWorkspace=self._output_ws_name)
def _validate_vasp_input_file(cls, filename_full_path: str) -> dict:
    """Validate a VASP output file: OUTCAR-style names are accepted directly,
    anything else must carry a .xml extension.

    :param filename_full_path: full path of a file to check.
    :returns: dict with 'Invalid' flag and explanatory 'Comment'.
    """
    logger.information("Validate VASP file with vibrational or phonon data.")
    # OUTCAR files pass without an extension check
    if 'OUTCAR' in os.path.basename(filename_full_path):
        return dict(Invalid=False, Comment="")
    output = cls._validate_ab_initio_file_extension(ab_initio_program="VASP",
                                                    filename_full_path=filename_full_path,
                                                    expected_file_extension=".xml")
    if output["Invalid"]:
        output["Comment"] = ("Invalid filename {}. Expected OUTCAR, *.OUTCAR or"
                             " *.xml for VASP calculation output. Please rename your file and try again. "
                             .format(filename_full_path))
    return output
def _subtract(self, minuend_workspace, subtrahend_workspace):
    """ Do a simple container subtraction (when no corrections are given). """
    logger.information('Using simple container subtraction')
    if self._rebin_container_ws:
        # rebin the container onto the sample grid so Minus is well defined
        logger.information('Rebining container to ensure Minus')
        subtrahend_workspace = s_api.RebinToWorkspace(WorkspaceToRebin=subtrahend_workspace,
                                                      WorkspaceToMatch=minuend_workspace,
                                                      OutputWorkspace="__rebinned",
                                                      StoreInADS=False)
    difference = minuend_workspace - subtrahend_workspace
    return difference
def _sample(self):
    """Set the sample material on the workspace and cache the cross sections
    and the (log-transformed) S(Q)-dependent scattering cross section."""
    set_material_alg = self.createChildAlgorithm('SetSampleMaterial')
    logger.information('Sample chemical formula : %s ' % (self._sample_chemical_formula))

    if self._sample_density_type == 'Mass Density':
        logger.information('Sample Mass density : %f' % (self._sample_density))
        set_material_alg.setProperty('SampleMassDensity', self._sample_density)
        # derive the number density from the formula + mass density
        builder = MaterialBuilder()
        mat = builder.setFormula(self._sample_chemical_formula).setMassDensity(self._sample_density).build()
        self._sample_number_density = mat.numberDensity
        logger.information('Sample Number density : %f' % (self._sample_number_density))
    else:
        # the provided density is already a number density
        self._sample_number_density = self._sample_density
        logger.information('Sample Number density : %f' % (self._sample_number_density))
        set_material_alg.setProperty('SampleNumberDensity', self._sample_number_density)

    set_material_alg.setProperty('InputWorkspace', self._sample_ws_name)
    set_material_alg.setProperty('ChemicalFormula', self._sample_chemical_formula)
    set_material_alg.execute()
    sam_material = mtd[self._sample_ws_name].sample().getMaterial()

    # Sample cross section
    self._sigc = sam_material.cohScatterXSection()
    self._sigi = sam_material.incohScatterXSection()
    self._sigt = sam_material.totalScatterXSection()
    self._siga_in = sam_material.absorbXSection()
    self._siga = self._siga_in

    self._sigss = self._sofq*self._sigc + self._sigi  #q_dependent total scatt X-sect
    self._sigss = np.log(self._sigss)  #interpolation later on
    self._sofq = np.log(self._sofq/self._sigt)  #S(Q) normalised
def _correct_sample_can(self):
    """ Correct for sample and container. """
    logger.information('Correcting sample and container')
    corrected_can_ws = '__corrected_can'

    factor_types = ['_ass']
    if self._use_can:
        factor_types.extend(['_acc', '_acsc', '_assc'])
    # remember the factors' native unit so they can be converted back afterwards
    corr_unit = s_api.mtd[self._corrections + '_ass'].getAxis(0).getUnit().unitID()
    for f_type in factor_types:
        self._convert_units_wavelength(corr_unit,
                                       self._corrections + f_type,
                                       self._corrections + f_type,
                                       "Wavelength")

    if self._rebin_container_ws:
        # match the container binning to the correction factors before dividing
        s_api.RebinToWorkspace(WorkspaceToRebin=self._scaled_container_wavelength,
                               WorkspaceToMatch=self._corrections + '_acc',
                               OutputWorkspace=self._scaled_container_wavelength)

    # Acc: container divided by its self-attenuation factor
    s_api.Divide(LHSWorkspace=self._scaled_container_wavelength,
                 RHSWorkspace=self._corrections + '_acc',
                 OutputWorkspace=corrected_can_ws)

    # Acsc: container attenuation in the presence of the sample
    s_api.Multiply(LHSWorkspace=corrected_can_ws,
                   RHSWorkspace=self._corrections + '_acsc',
                   OutputWorkspace=corrected_can_ws)
    s_api.Minus(LHSWorkspace=self._sample_ws_wavelength,
                RHSWorkspace=corrected_can_ws,
                OutputWorkspace=self._output_ws_name)

    # Assc: normalise by the sample self-attenuation factor
    s_api.Divide(LHSWorkspace=self._output_ws_name,
                 RHSWorkspace=self._corrections + '_assc',
                 OutputWorkspace=self._output_ws_name)

    # restore the correction factors to their original unit
    for f_type in factor_types:
        self._convert_units_wavelength(corr_unit,
                                       self._corrections + f_type,
                                       self._corrections + f_type,
                                       corr_unit)

    s_api.DeleteWorkspace(corrected_can_ws)
def _subtract(self):
    """ Do a simple container subtraction (when no corrections are given). """
    # match container binning to the sample grid first
    logger.information('Rebining container to ensure Minus')
    RebinToWorkspace(WorkspaceToRebin=self._can_ws_name,
                     WorkspaceToMatch=self._sample_ws_name,
                     OutputWorkspace=self._can_ws_name)
    logger.information('Using simple container subtraction')
    Minus(LHSWorkspace=self._sample_ws_name,
          RHSWorkspace=self._scaled_container,
          OutputWorkspace=self._output_ws_name)
def _fit_lor(self, ws):
    """Fit a Lorentzian to *ws* and return its (height, FWHM)."""
    function = 'name=Lorentzian, Amplitude=1.0, PeakCentre=0.0, FWHM=%f' % self._resolution
    Fit(InputWorkspace=ws, Function=function, Output='__peak',
        OutputParametersOnly=True, EnableLogging=False)
    values = np.asarray(mtd['__peak_Parameters'].column('Value'))
    height = values[0]
    FWHM = values[2]
    logger.information('Width is %f' % FWHM)
    # remove the fit's temporary output tables
    self._delete_ws('__peak_Parameters')
    self._delete_ws('__peak_NormalisedCovarianceMatrix')
    return height, FWHM
def PyExec(self):
    """Run the algorithm, skipping the transform when a dry run is requested."""
    self._setup()
    self._calculate_parameters()
    if self._dry_run:
        logger.information('Dry run, will not run TransformToIqt')
    else:
        self._transform()
        self._add_logs()
    self.setProperty('ParameterWorkspace', self._parameter_table)
    self.setProperty('OutputWorkspace', self._output_workspace)
def PyExec(self):
    """Load the S(Q) data, compute its derivatives, then optionally save
    and/or plot the result."""
    self._setup()
    DeconD4LoadSofQ(FilePath=self._input_path,
                    Wavelength=self._lambda,
                    ZeroAngle=self._azero)
    self._deriv_name = self._input_name + '_deriv'
    logger.information('Derivatives workspace : %s' % self._deriv_name)
    DeconCalculateDerivatives(DataWorkspace=self._theta_name,
                              DerivativesWorkspace=self._deriv_name)
    if self._save:  #Save option
        self._save_result()
    if self._plot:  #Plot option
        self._plot_result()
def _pre_process_corrections(self):
    """
    If the sample is not in wavelength then convert the corrections to
    whatever units the sample is in.
    """
    unit_id = mtd[self._sample_ws_name].getAxis(0).getUnit().unitID()
    logger.information('x-unit is ' + unit_id)

    factor_types = ['ass']
    if self._use_can:
        factor_types.extend(['acc', 'acsc', 'assc'])

    for factor_type in factor_types:
        input_name = self._get_correction_factor_ws_name(factor_type)
        output_name = self._corrections + '_' + factor_type

        if unit_id != 'Wavelength':
            # Configure conversion
            if unit_id == 'dSpacing':
                emode = 'Elastic'
                efixed = 0.0
            elif unit_id == 'DeltaE':
                emode = 'Indirect'
                from IndirectCommon import getEfixed
                efixed = getEfixed(mtd[self._sample_ws_name])
            else:
                # only dSpacing and DeltaE conversions are supported here
                raise ValueError(
                    'Unit %s in sample workspace is not supported' % unit_id)

            # Do conversion
            ConvertUnits(InputWorkspace=input_name,
                         OutputWorkspace=output_name,
                         Target=unit_id,
                         EMode=emode,
                         EFixed=efixed)
        else:
            # No need to convert
            CloneWorkspace(InputWorkspace=input_name,
                           OutputWorkspace=output_name)

    # Group the temporary factor workspaces (for easy removal later)
    GroupWorkspaces(InputWorkspaces=[
        self._corrections + '_' + f_type for f_type in factor_types
    ], OutputWorkspace=self._corrections)
def _correct_sample_can(self, sample_workspace, container_workspace, factor_workspaces):
    """ Correct for sample and container. """
    logger.information('Correcting sample and container')
    # convert every correction factor to wavelength units up front
    factors_in_lambda = {name: self._convert_units_wavelength(ws)
                         for name, ws in factor_workspaces.items()}

    if self._rebin_container_ws:
        container_workspace = s_api.RebinToWorkspace(WorkspaceToRebin=container_workspace,
                                                     WorkspaceToMatch=factors_in_lambda['acc'],
                                                     OutputWorkspace="rebinned",
                                                     StoreInADS=False)
    return self._corrections_approximation(sample_workspace,
                                           container_workspace,
                                           factors_in_lambda)
def _q_to_theta(self):
    """Convert the workspace X axis from Q to two-theta (degrees), apply the
    zero-angle correction and record the conversion parameters as logs."""
    #convert from Q to 2theta
    k0 = 4.0 * math.pi / float(self._lambda)
    self._clone_ws(self._temp, self._theta_ws)
    x_th = mtd[self._theta_ws].dataX(0)
    x_th = 2.0 * np.degrees(np.arcsin(x_th / k0))  #convert to angle
    x_th = x_th - self._azero  #apply zero angle correction
    mtd[self._theta_ws].setX(0, x_th)
    # relabel the X axis as 2theta in degrees
    unitx = mtd[self._theta_ws].getAxis(0).setUnit("Label")
    unitx.setLabel('2theta', 'deg')
    # record the conversion inputs as sample logs
    self._sofq_logs = [('lambda_in', self._lambda), ('zero_in', self._azero)]
    log_names = [item[0] for item in self._sofq_logs]
    log_values = [item[1] for item in self._sofq_logs]
    self._add_sample_log_mult(self._theta_ws, log_names, log_values)
    logger.information('Convert Q to 2theta : lambda = %f ; zero= %f' % (self._lambda, self._azero))
    logger.information('2Theta : %f to %f' % (x_th[0], x_th[len(x_th) - 1]))
def _correct_sample_can(self):
    """ Correct for sample and container. """
    logger.information('Correcting sample and container')
    corrected_can_ws = '__corrected_can'

    factor_types = ['_ass']
    if self._use_can:
        factor_types.extend(['_acc', '_acsc', '_assc'])
    # remember the factors' native unit so they can be converted back at the end
    corr_unit = s_api.mtd[self._corrections + '_ass'].getAxis(0).getUnit().unitID()
    for f_type in factor_types:
        self._convert_units_wavelength(corr_unit,
                                       self._corrections + f_type,
                                       self._corrections + f_type,
                                       "Wavelength")

    # NOTE(review): unlike the sibling implementation, the container is rebinned
    # unconditionally here (no _rebin_container_ws guard) — confirm intent
    s_api.RebinToWorkspace(WorkspaceToRebin=self._scaled_container_wavelength,
                           WorkspaceToMatch=self._corrections + '_acc',
                           OutputWorkspace=self._scaled_container_wavelength)

    # Acc: container divided by its self-attenuation factor
    s_api.Divide(LHSWorkspace=self._scaled_container_wavelength,
                 RHSWorkspace=self._corrections + '_acc',
                 OutputWorkspace=corrected_can_ws)

    # Acsc: container attenuation in the presence of the sample
    s_api.Multiply(LHSWorkspace=corrected_can_ws,
                   RHSWorkspace=self._corrections + '_acsc',
                   OutputWorkspace=corrected_can_ws)
    s_api.Minus(LHSWorkspace=self._sample_ws_wavelength,
                RHSWorkspace=corrected_can_ws,
                OutputWorkspace=self._output_ws_name)

    # Assc: normalise by the sample self-attenuation factor
    s_api.Divide(LHSWorkspace=self._output_ws_name,
                 RHSWorkspace=self._corrections + '_assc',
                 OutputWorkspace=self._output_ws_name)

    # restore the correction factors to their original unit
    for f_type in factor_types:
        self._convert_units_wavelength(corr_unit,
                                       self._corrections + f_type,
                                       self._corrections + f_type,
                                       corr_unit)

    s_api.DeleteWorkspace(corrected_can_ws)
def _subtract(self):
    """ Do a simple container subtraction (when no corrections are given). """
    logger.information('Using simple container subtraction')
    if self._rebin_container_ws:
        # match the container binning to the sample before subtracting
        logger.information('Rebining container to ensure Minus')
        s_api.RebinToWorkspace(WorkspaceToRebin=self._scaled_container_wavelength,
                               WorkspaceToMatch=self._sample_ws_wavelength,
                               OutputWorkspace=self._scaled_container_wavelength)
    s_api.Minus(LHSWorkspace=self._sample_ws_wavelength,
                RHSWorkspace=self._scaled_container_wavelength,
                OutputWorkspace=self._output_ws_name)
def _create_and_save_configuration(self):
    """Persist the algorithm's current property values to a timestamped JSON
    configuration file instead of carrying out the reduction."""
    logger.notice('Reduction will not be carried out')
    #
    # configuration file name and save location
    #
    basename = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    dir_name = self.getProperty('ConfigSaveDir').value
    if len(dir_name) <= 0:  # default directory
        run_number = self.getProperty('RunNumbers').value[0]  # first run number
        dir_name = Path(self.get_IPTS_Local(run_number)) / 'shared' / 'autoreduce' / 'configurations'
        dir_name.mkdir(parents=True, exist_ok=True)  # in case it has not yet been created
    filename = str(Path(dir_name) / f'{basename}.json')
    #
    # Selected algorithm properties as a dictionary
    #
    dict_repr = json.loads(str(self)).get('properties')  # representation of the algorithm's properties in a dict
    # Remove not wanted properties
    for not_wanted in ('RunNumbers', 'OutputDirectory', 'EnableConfigurator', 'ConfigSaveDir'):
        if not_wanted in dict_repr:
            del dict_repr[not_wanted]
    r"""
    hack to fix the entry for the default JSON represenation of property DetCalFilename,
    which is saved as a list of lists

    Example: "DetCalFilename": [ ["/SNS/SNAP/IPTS-26217/shared/E76p2_W65p3.detcal"],
                                 ["/SNS/SNAP/IPTS-26217/shared/E76p2_W65p5.detcal"]]
    must become:
    "DetCalFilename": "/SNS/SNAP/IPTS-26217/shared/E76p2_W65p3.detcal,/SNS/SNAP/IPTS-26217/shared/E76p2_W65p5.detcal"
    """
    if 'DetCalFilename' in dict_repr:
        dict_repr['DetCalFilename'] = ','.join([entry[0] for entry in dict_repr.get('DetCalFilename')])
    #
    # Save to file in JSON format
    #
    formatted_pretty = json.dumps(dict_repr, sort_keys=True, indent=4)
    with open(filename, 'w') as f:
        f.write(formatted_pretty)
    # fixed: the log message contained a literal placeholder instead of the path
    logger.information(f'Saving configuration to {filename}')
    logger.debug(f'Configuration contents:\n{formatted_pretty}')
def PyExec(self):
    """Run the algorithm; on a dry run only report the skipped stages."""
    self._setup()
    self._calculate_parameters()
    if self._dry_run:
        skip_prog = Progress(self, start=0.3, end=1.0, nreports=2)
        skip_prog.report('skipping transform')
        skip_prog.report('skipping add logs')
        logger.information('Dry run, will not run TransformToIqt')
    else:
        self._output_workspace = self._transform()
        self._add_logs()
    self.setProperty('ParameterWorkspace', self._parameter_table)
    self.setProperty('OutputWorkspace', self._output_workspace)
def _process_container_workspace(self, container_workspace, prog_container):
    """Apply the optional can shift and scale, returning the container in wavelength units."""
    # Apply container shift if needed
    if not self._shift_can:
        shifted = container_workspace
    else:
        # Use a temporary workspace so the input data is left untouched
        prog_container.report('Shifting can')
        shifted = self._shift_workspace(container_workspace, self._can_shift_factor)
        logger.information('Container shifted by %f' % self._can_shift_factor)

    # Apply container scale factor if needed
    if not self._scale_can:
        return self._convert_units_wavelength(shifted)
    # Use a temporary workspace so the original data is left untouched
    prog_container.report('Scaling can')
    scaled = self._convert_units_wavelength(shifted * self._can_scale_factor)
    logger.information('Container scaled by %f' % self._can_scale_factor)
    return scaled
def _discover_python_interfaces(self, interface_dir):
    """Return a dictionary mapping a category to a list of named Python interfaces"""
    registered = ConfigService['mantidqt.python_interfaces'].split()
    # list of custom interfaces that are not qt4/qt5 compatible
    not_compatible = ['Frequency_Domain_Analysis_Old.py']
    # detect the python interfaces
    interfaces = {}
    for entry in registered:
        category, scriptname = entry.split('/')
        if not os.path.exists(os.path.join(interface_dir, scriptname)):
            logger.warning('Failed to find script "{}" in "{}"'.format(scriptname, interface_dir))
        elif scriptname in not_compatible:
            logger.information('Not adding gui "{}"'.format(scriptname))
        else:
            interfaces.setdefault(category, []).append(scriptname)
    return interfaces
def populateAfterMantidImport(self): from mantid.kernel import ConfigService, logger # TODO ConfigService should accept unicode strings interface_dir = ConfigService[str( 'mantidqt.python_interfaces_directory')] items = ConfigService[str('mantidqt.python_interfaces')].split() # list of custom interfaces that have been made qt4/qt5 compatible # TODO need to make *anything* compatible GUI_WHITELIST = ['DGSPlanner.py', 'FilterEvents.py', 'TofConverter.py'] # detect the python interfaces interfaces = {} for item in items: key, scriptname = item.split('/') # TODO logger should accept unicode if not os.path.exists(os.path.join(interface_dir, scriptname)): logger.warning( str('Failed to find script "{}" in "{}"'.format( scriptname, interface_dir))) continue if scriptname not in GUI_WHITELIST: logger.information( str('Not adding gui "{}"'.format(scriptname))) continue temp = interfaces.get(key, []) temp.append(scriptname) interfaces[key] = temp # add the interfaces to the menu keys = list(interfaces.keys()) keys.sort() for key in keys: submenu = self.interfaces_menu.addMenu(key) names = interfaces[key] names.sort() for name in names: action = submenu.addAction( name.replace('.py', '').replace('_', ' ')) script = os.path.join(interface_dir, name) action.triggered.connect(lambda checked, script=script: self. launchCustomGUI(script))
def _validate_castep_input_file(cls, filename_full_path: str) -> dict:
    """
    Check if ab initio input vibrational or phonon file has been produced by CASTEP.
    Currently the crucial keywords in the first few lines are checked (to be modified if a better validation is
    found...)
    :param filename_full_path: full path of a file to check
    :returns: Dictionary with two entries "Invalid", "Comment". Valid key can have two values: True/ False. As it
              comes to "Comment" it is an empty string if Valid:True, otherwise stores description of the problem.
    """
    logger.information("Validate CASTEP file with vibrational or phonon data.")
    msg_err = "Invalid %s file. " % filename_full_path
    output = cls._validate_ab_initio_file_extension(ab_initio_program="CASTEP",
                                                    filename_full_path=filename_full_path,
                                                    expected_file_extension=".phonon")
    if output["Invalid"]:
        return output
    # Check the structure of the header part of the file.
    # Here fortran convention is followed: case of letter does not matter.
    # One (pattern, error text) pair per expected header line, in file order;
    # this replaces five copy-pasted read-and-compare stanzas.
    header_checks = [
        ("beginheader", "The first line should be 'BEGIN header'."),
        ("numberofions", "The second line should include 'Number of ions'."),
        ("numberofbranches", "The third line should include 'Number of branches'."),
        ("numberofwavevectors", "The fourth line should include 'Number of wavevectors'."),
        ("frequenciesin", "The fifth line should be 'Frequencies in'."),
    ]
    with open(filename_full_path) as castep_file:
        for pattern, problem in header_checks:
            line = cls._get_one_line(castep_file)
            if not cls._compare_one_line(one_line=line, pattern=pattern):
                return dict(Invalid=True, Comment=msg_err + problem)
    return dict(Invalid=False, Comment="")
def _wave_range(self):
    """Build the wavelength sample points and the elastic wavelength.

    Reads the first spectrum of the sample workspace to obtain the wavelength
    range, fills ``self._waves`` with ``self._number_wavelengths`` evenly
    spaced values, and sets ``self._elastic`` according to ``self._emode``.

    :raises ValueError: if the energy mode is not recognised.
    """
    wave_range = '__WaveRange'
    ExtractSingleSpectrum(InputWorkspace=self._sample_ws_name, OutputWorkspace=wave_range, WorkspaceIndex=0)
    Xin = mtd[wave_range].readX(0)
    wave_min = Xin[0]
    wave_max = Xin[len(Xin) - 1]
    number_waves = int(self._number_wavelengths)
    wave_bin = (wave_max - wave_min) / (number_waves - 1)
    self._waves = [wave_min + idx * wave_bin for idx in range(number_waves)]
    if self._emode == 'Elastic':
        # take the mid-point of the wavelength range
        self._elastic = self._waves[int(number_waves / 2)]
    elif self._emode in ('Direct', 'Indirect'):
        # BUG FIX: 'Direct' mode previously fell through with self._elastic
        # unset, crashing at the logging line below. Both modes use the
        # elastic wavelength lambda = sqrt(81.787 / Efixed).
        self._elastic = math.sqrt(81.787 / self._efixed)  # elastic wavelength
    else:
        # Fail fast with a clear message instead of a later AttributeError
        raise ValueError('Unknown emode %s' % self._emode)
    logger.information('Elastic lambda %f' % self._elastic)
    DeleteWorkspace(wave_range)
def _cache_parameter_data(self):
    """
    Validates and caches data from parameter workspace.
    """
    sample_run = mtd[self._par_ws].getRun()
    if sample_run.getLogData('fit_program').value != 'ConvFit':
        raise ValueError('Fit program MUST be ConvFit')
    self._delta = sample_run.getLogData('delta_function').value
    logger.information('delta_function : %s' % self._delta)
    if 'lorentzians' not in sample_run:
        raise ValueError('Fit MUST be Lorentzians')
    self._lor = int(sample_run.getLogData('lorentzians').value)
    logger.information('lorentzians : %i' % self._lor)
def _wave_range(self):
    """Populate ``self._wavelengths`` with the sample points used for the corrections.

    In Efixed mode a single elastic wavelength is appended; otherwise the
    wavelength range of the sample's first spectrum is divided into
    ``self._number_wavelengths`` evenly spaced values.
    """
    if self._emode == 'Efixed':
        lambda_fixed = math.sqrt(81.787 / self._efixed)
        self._wavelengths.append(lambda_fixed)
        logger.information('Efixed mode, setting lambda_fixed to {0}'.format(lambda_fixed))
        return

    ws_name = '__WaveRange'
    ExtractSingleSpectrum(InputWorkspace=self._sample_ws_name, OutputWorkspace=ws_name, WorkspaceIndex=0)
    x_data = mtd[ws_name].readX(0)
    lambda_min = x_data[0]
    lambda_max = x_data[len(x_data) - 1]
    n_waves = self._number_wavelengths
    step = (lambda_max - lambda_min) / (n_waves - 1)
    self._wavelengths = [lambda_min + i * step for i in range(n_waves)]
    DeleteWorkspace(ws_name, EnableLogging=False)
def should_show_on_startup():
    """
    Determines if the first time dialog should be shown
    :return: True if the dialog should be shown
    """
    # first check the facility and instrument
    facility = ConfigService.getString(AboutPresenter.FACILITY)
    instrument = ConfigService.getString(AboutPresenter.INSTRUMENT)
    if not facility:
        return True
    # check we can get the facility and instrument
    try:
        facilityInfo = ConfigService.getFacility(facility)
        instrumentInfo = ConfigService.getInstrument(instrument)
        logger.information("Default facility '{0}', instrument '{1}'\n".format(
            facilityInfo.name(), instrumentInfo.name()))
    except RuntimeError:
        # failed to find the facility or instrument
        # BUG FIX: .format() previously bound only to the second of the two
        # concatenated string literals (which contains no placeholders), so
        # the facility/instrument names were never interpolated. Implicit
        # adjacent-literal concatenation makes format() apply to the whole
        # message.
        logger.error(
            "Could not find your default facility '{0}' or instrument '{1}' in facilities.xml, "
            "showing please select again.\n".format(facility, instrument))
        return True

    settings = QSettings()
    settings.beginGroup(AboutPresenter.DO_NOT_SHOW_GROUP)
    doNotShowUntilNextRelease = settings.value(AboutPresenter.DO_NOT_SHOW, 0, type=int)
    lastVersion = settings.value(AboutPresenter.PREVIOUS_VERSION, "", type=str)
    # major/minor are strings here; join them into "major.minor"
    current_version = version().major + "." + version().minor
    settings.endGroup()
    if not doNotShowUntilNextRelease:
        return True
    # Now check if the version has changed since last time
    return current_version != lastVersion