def load_setting_float(qsettings, param_name, default_value):
    """ Load a setting from QSettings as a float.
    :param qsettings: QtCore.QSettings instance to read the setting from
    :param param_name: name of the parameter/setting to load
    :param default_value: float returned when the setting is absent or cannot be parsed
    :return: float value of the setting (default_value on parse failure)
    """
    # check inputs
    assert isinstance(qsettings, QtCore.QSettings), 'Input settings must be a QSetting instance but not {0}.' \
                                                    ''.format(type(qsettings))
    datatypeutility.check_float_variable('Default float setting', default_value, (None, None))

    float_value_str = qsettings.value(param_name, default_value)
    if isinstance(float_value_str, QVariant):
        # legacy Qt API: QVariant requires explicit conversion
        float_value = float_value_str.toFloat()
    else:
        # value came back as unicode/str: convert explicitly
        print('[DB...BAT] QSetting {}: {} is of type {}'.format(
            param_name, float_value_str, type(float_value_str)))
        try:
            # BUG FIX: was int(str(...)), which truncated float settings and
            # raised ValueError for strings like '1.5'; use float() instead
            float_value = float(str(float_value_str))
        except (TypeError, ValueError):
            float_value = None

    # fall back to the default when conversion failed
    if float_value is None:
        float_value = default_value

    return float_value
def strip_v_peaks(self, bank_id, peak_fwhm, pos_tolerance, background_type, is_high_background):
    """Strip vanadium peaks from one bank of the loaded vanadium workspace.

    The name of the peak-stripped workspace is recorded in _striped_peaks_ws_dict.
    :param bank_id: bank number (1..99)
    :param peak_fwhm: peak FWHM in number of pixels (integer, 1..100)
    :param pos_tolerance: peak position tolerance (non-negative float)
    :param background_type: background type forwarded to the peak stripper
    :param is_high_background: flag for high-background data
    :return: name of the output (peak-stripped) workspace
    """
    # validate user inputs
    datatypeutility.check_int_variable('Bank ID', bank_id, (1, 99))
    datatypeutility.check_int_variable('FWHM (number of pixels)', peak_fwhm, (1, 100))
    datatypeutility.check_float_variable('Peak position tolerance', pos_tolerance, (0, None))

    van_workspace = mantid_helper.retrieve_workspace(self._van_workspace_name)
    if mantid_helper.is_workspace_group(self._van_workspace_name):
        # workspace group: select the member workspace for the requested bank
        source_ws_name = van_workspace[bank_id - 1].name()
        banks_to_strip = [1]
    else:
        # single workspace holding all banks
        source_ws_name = self._van_workspace_name
        banks_to_strip = [bank_id]

    stripped_ws_name = source_ws_name + '_NoPeak'

    mantid_helper.strip_vanadium_peaks(input_ws_name=source_ws_name,
                                       output_ws_name=stripped_ws_name,
                                       bank_list=banks_to_strip,
                                       binning_parameter=None,
                                       fwhm=peak_fwhm,  # PEAK FWHM must be integer (legacy)
                                       peak_pos_tol=pos_tolerance,
                                       background_type=background_type,
                                       is_high_background=is_high_background)

    # book-keep the stripped workspace per bank
    self._striped_peaks_ws_dict[bank_id] = stripped_ws_name

    return stripped_ws_name
def set_time_slicer(self, start_time, time_step, stop_time):
    """Generate event filters (splitter workspaces) by uniform time slicing.
    :param start_time: filter start time in seconds, or None
    :param time_step: slicing time interval in seconds, or None
    :param stop_time: filter stop time in seconds, or None
    :return: (True, slicer key) on success; (False, error message) on failure
    """
    # validate inputs: each boundary is optional but must be sane when given
    if start_time is not None:
        datatypeutility.check_float_variable('Event filters starting time', start_time, (0., None))
    if stop_time is not None:
        datatypeutility.check_float_variable(
            'Event filtering stopping time', stop_time, (1.E-10, None))
    if start_time is not None and stop_time is not None and start_time >= stop_time:
        raise RuntimeError('User specified event filters starting time {} is after stopping time {}'
                           ''.format(start_time, stop_time))
    if start_time is None and stop_time is None and time_step is None:
        raise RuntimeError('It is not allowed to give all 3 Nones. Generate events filter by time '
                           'must specify at least one of min_time, max_time and time_interval')

    # the tag doubles as the splitter workspace name and the slicer key
    tag = 'TimeSlicer_%06d' % self._myRunNumber
    splitter_name = tag
    info_name = tag + '_Info'

    assert self._meta_ws_name is not None, 'Mantid workspace has not been loaded yet.'
    status, message = mantid_helper.generate_event_filters_by_time(
        self._meta_ws_name, splitter_name, info_name,
        start_time, stop_time, time_step, 'Seconds')

    # propagate failure from the filter generator
    if status is False:
        return status, message

    # record the slicer set-up for later chopping
    self._chopSetupDict[tag] = {'start': start_time,
                                'step': time_step,
                                'stop': stop_time,
                                'splitter': splitter_name,
                                'info': info_name}

    # the tag serves as the slicer key
    return True, tag
def parse_float(line_edit, allow_blank=True, default=None):
    """Parse the text of a QLineEdit as a float.
    :param line_edit: QLineEdit widget to read from
    :param allow_blank: if True, a blank or unparsable entry falls back to the default
    :param default: value used on fallback; if None the editor stays blank and None is returned
    :return: float value, or None when blank is allowed and no default is given
    :raises RuntimeError: when the entry is blank/unparsable and allow_blank is False
    """
    # Check input
    assert(isinstance(line_edit, QLineEdit)), 'Input shall be a QLineEdit instance but not a {}'.format(type(line_edit))

    text = str(line_edit.text()).strip()

    parsed = None
    failure = None  # reason the input could not be parsed (None == success)
    if not text:
        failure = 'Blank editor'
    else:
        try:
            parsed = float(text)
        except ValueError as value_err:
            failure = '{} cannot be converted to float: {}'.format(text, value_err)
    # END-IF

    # success path: return the parsed value directly
    if failure is None:
        return parsed

    # invalid input and blanks are not allowed: raise
    if not allow_blank:
        raise RuntimeError(failure)

    # invalid input but blanks allowed: fall back to the default (if any)
    if default is None:
        return None

    datatypeutility.check_float_variable('Default value of QLineEdit', default, (None, None))
    line_edit.setText('{}'.format(default))
    return default
def set_x_range(self, min_x, max_x):
    """Set the X-axis plotting range and reflect it in the min/max line edits.
    :param min_x: lower X limit
    :param max_x: upper X limit; must be strictly greater than min_x
    :return: None
    :raises RuntimeError: if min_x >= max_x
    """
    datatypeutility.check_float_variable('Lower limit of X for plot', min_x, (None, None))
    datatypeutility.check_float_variable('Upper limit of X for plot', max_x, (None, None))
    if min_x >= max_x:
        raise RuntimeError('Lower X limit {} cannot be equal to or larger than upper X limit {}'
                           ''.format(min_x, max_x))

    # store the range and mirror it into the UI widgets
    self._x_min, self._x_max = min_x, max_x
    self.ui.lineEdit_xMin.setText('{}'.format(min_x))
    self.ui.lineEdit_xMax.setText('{}'.format(max_x))

    return
def set_overlap_time_slicer(self, start_time, stop_time, time_interval, overlap_time_interval, splitter_tag=None):
    """Set slicers for constant time periods with overlap.

    Windows are [t0, t0 + dt], [t0 + db, t0 + db + dt], ... where dt is
    time_interval and db is overlap_time_interval.
    :param start_time: filter start time in seconds (None -> 0)
    :param stop_time: filter stop time in seconds (None -> run stop time)
    :param time_interval: width of each slicing window in seconds
    :param overlap_time_interval: shift between consecutive windows; must be < time_interval
    :param splitter_tag: base name for generated splitters (None -> standard manual tag)
    :return: (True, list of splitter tags) or (False, error message)
    """
    # validate optional boundary times
    if start_time is not None:
        datatypeutility.check_float_variable('Event filters starting time', start_time, (0., None))
    if stop_time is not None:
        datatypeutility.check_float_variable(
            'Event filtering stopping time', stop_time, (1.E-10, None))
    if start_time is not None and stop_time is not None and start_time >= stop_time:
        raise RuntimeError('User specified event filters starting time {} is after stopping time {}'
                           ''.format(start_time, stop_time))

    datatypeutility.check_float_variable('Time interval', time_interval, (0., None))
    datatypeutility.check_float_variable('Overlap time interval', overlap_time_interval, (0., None))
    if time_interval <= overlap_time_interval:
        raise RuntimeError('Time step/interval {} cannot be equal or less than overlapped time period '
                           '{}'.format(time_interval, overlap_time_interval))

    # resolve defaults for the time range
    if start_time is None:
        start_time = 0
    if stop_time is None:
        stop_time = mantid_helper.get_run_stop(self._meta_ws_name, 'second', is_relative=True)
        print('[DB...BAT] Run stop = {}'.format(stop_time))

    # build the overlapping (start, stop) windows
    windows = list()
    window_start = start_time
    window_stop = -1
    while window_stop < stop_time:
        window_stop = window_start + time_interval
        if window_stop > stop_time:
            # clip the last window to just past the stop time
            window_stop = stop_time + 1.E-10
        windows.append((window_start, window_stop))
        # advance the window start by the overlap step
        window_start += overlap_time_interval
    # END-WHILE

    # determine the base tag
    if splitter_tag is None:
        splitter_tag = get_standard_manual_tag(self._meta_ws_name)

    # generate one splitter workspace per window
    tag_list = list()
    for window_index, (t_begin, t_end) in enumerate(windows):
        tag_i = splitter_tag + '_{:05}'.format(window_index)
        info_i = tag_i + '_info'
        status, message = mantid_helper.generate_event_filters_by_time(
            self._meta_ws_name, tag_i, info_i, t_begin, t_end,
            delta_time=None, time_unit='second')
        if not status:
            return False, message
        # good: record the splitter
        tag_list.append(tag_i)
        self._chopSetupDict[tag_i] = {'splitter': tag_i, 'info': info_i}
    # END-FOR

    return True, tag_list
def locate_cycle_boundaries(self, raw_ws_name, smoothed_ws_name, x_start, x_stop, cycle_local_max_lower_limit, num_neighbors, trust_start_stop):
    """Locate the boundaries (local minima) and peaks (local maxima) of temperature-like cycles.

    Local maxima are first found on the smoothed data (to suppress noise), filtered by a
    lower value limit, then refined against the raw data; minima are taken between each
    pair of neighboring maxima.  Two debug workspaces ('debug_maxima', 'debug_minima')
    are created as a side effect.

    :param raw_ws_name: name of the raw (unsmoothed) Mantid workspace
    :param smoothed_ws_name: name of the smoothed Mantid workspace
    :param x_start: starting time of the cycles (>= 0, must be < x_stop)
    :param x_stop: stopping time of the cycles (>= 0)
    :param cycle_local_max_lower_limit: minimum raw Y value for a local maximum to count as a cycle peak
    :param num_neighbors: half-width (in points) of the neighborhood searched in the raw data
                          to refine each smoothed-data maximum
    :param trust_start_stop: if True, take x_start/x_stop as the first/last cycle boundaries;
                             otherwise estimate them from the average cycle time
    :return: tuple (local minima index array, local maxima index array) into the raw vectors
    :raises RuntimeError: if x_start >= x_stop or fewer than 2 local maxima are found
    """
    def check_statistic(max_x_vector, max_y_vector, level):
        # Print cycle-time statistics (average +/- std dev of the spacing between maxima)
        # and list maxima whose spacing looks suspicious.
        diff_max_x_vec = max_x_vector[1:] - max_x_vector[:-1]
        std_dev = numpy.std(diff_max_x_vec)
        avg_cycle_time = numpy.average(diff_max_x_vec)
        # NOTE(review): spacings are compared against the std dev itself, not e.g.
        # (avg - std dev) — confirm this is the intended "false maxima" criterion
        false_indexes = numpy.where(
            diff_max_x_vec < numpy.std(diff_max_x_vec))[0]
        msg = 'Cycle time = {} +/- {}\nFalse local maxima: {}, {}' \
              ''.format(avg_cycle_time, std_dev, max_x_vector[false_indexes], max_y_vector[false_indexes])
        print('[{}]: {}'.format(level.upper(), msg))
        return avg_cycle_time, std_dev

    # check inputs
    datatypeutility.check_float_variable('Starting time of cycles', x_start, (0, None))
    datatypeutility.check_float_variable('Stopping time of cycles', x_stop, (0, None))
    if x_start >= x_stop:
        raise RuntimeError('Starting time {} cannot be equal to later than stopping time {}'
                           ''.format(x_start, x_stop))

    # get workspaces
    raw_ws = mantid_helper.retrieve_workspace(raw_ws_name, True)
    smooth_ws = mantid_helper.retrieve_workspace(smoothed_ws_name, True)

    # use smoothed workspace to locate maxima first
    vec_x = smooth_ws.readX(0)
    vec_y = smooth_ws.readY(0)
    raw_vec_times = raw_ws.readX(0)
    raw_vec_value = raw_ws.readY(0)

    # determine start and stop indexes in the smoothed X vector
    start_index = numpy.searchsorted(vec_x, x_start)
    stop_index = numpy.searchsorted(vec_x, x_stop, 'right')
    print('[INFO] Start X = {}, Y = {}, Index = {}'.format(
        vec_x[start_index], vec_y[start_index], start_index))
    # NOTE(review): 'Stap' is a typo for 'Stop' in this runtime string; also
    # vec_x[stop_index] can be out of range when x_stop exceeds the data — verify
    print('[INFO] Stap X = {}, Y = {}, Index = {}'.format(
        vec_x[stop_index], vec_y[stop_index], stop_index))

    # Step 1: use smoothed data to find local maxima: use 'argrelextrema' to find local maxima
    # check Y only
    # roi_vec_x = vec_x[start_index:stop_index]
    roi_vec_y = vec_y[start_index:stop_index]
    roi_maxima_indexes = argrelextrema(roi_vec_y, numpy.greater)
    roi_maxima_indexes = roi_maxima_indexes[0]  # get to list
    print('[DEBUG] maximum indexes (in ROI arrays): {}'.format(
        roi_maxima_indexes))

    # convert ROI-relative indexes to indexes into the full raw vectors
    local_maxima_indexes = roi_maxima_indexes + start_index

    # there are a lot of local maxima from signal noise: filter out the small values
    max_y_vector = raw_vec_value[
        local_maxima_indexes]  # log values of local maxima
    # indexes for max Y vector
    y_indexes = numpy.where(max_y_vector > cycle_local_max_lower_limit)
    local_maxima_indexes = local_maxima_indexes[y_indexes]
    maxima_times_vec = raw_vec_times[
        local_maxima_indexes]  # times for local maxima
    # equivalent to: max_x_vector = max_x_vector[y_indexes]
    maxima_value_vec = raw_vec_value[
        local_maxima_indexes]  # log values of local maxima
    # equivalent to: max_y_vector = max_y_vector[y_indexes]
    # print ('Filtered indexes: {}'.format(max_index_vector))
    check_statistic(maxima_times_vec, maxima_value_vec, level='debug')

    # Step 2: map from smoothed data to raw data (real maxima)
    # refine each smoothed-data maximum by searching +/- num_neighbors points in the raw data
    max_index_set = set()
    for max_index_i in local_maxima_indexes:
        # search the nearby N = 5 points
        i_start = max_index_i - num_neighbors
        i_stop = max_index_i + num_neighbors
        max_index_i = numpy.argmax(raw_vec_value[i_start:i_stop])
        max_index_set.add(max_index_i + i_start)
    # END-FOR

    # convert to vector: set the max_index_set back to local_maxima_indexes
    local_maxima_indexes = numpy.array(
        sorted(list(max_index_set))
    )  # this local_maxima_indexes is optimized from previous local_maxima_indexes
    maxima_times_vec = raw_vec_times[local_maxima_indexes]
    maxima_value_vec = raw_vec_value[local_maxima_indexes]

    # check
    avg_cycle_time, std_dev = check_statistic(maxima_times_vec, maxima_value_vec, 'info')

    # create a debug workspace holding the refined maxima
    CreateWorkspace(DataX=maxima_times_vec, DataY=maxima_value_vec,
                    NSpec=1, OutputWorkspace='debug_maxima')

    if maxima_times_vec.shape[0] < 2:
        raise RuntimeError('Only found {} local maxima. Unable to proceed'.format(
            maxima_times_vec.shape[0]))

    # Step 3: find (real) minima by finding minimum between 2 neighboring local maxima
    # note: one more slot than maxima — entries [0] and [-1] are filled below
    local_minima_indexes = numpy.ndarray(shape=(maxima_value_vec.shape[0] + 1, ), dtype='int64')
    for i_cycle in range(len(local_maxima_indexes) - 1):
        # locate the minima between maxima i_cycle and i_cycle + 1
        start_index_i = local_maxima_indexes[i_cycle]
        stop_index_i = local_maxima_indexes[i_cycle + 1]
        print('# index: start = {}, stop = {}, # points = {}'.format(
            start_index_i, stop_index_i, stop_index_i - start_index_i))
        vec_x_i = raw_vec_times[start_index_i:stop_index_i]
        vec_y_i = raw_vec_value[start_index_i:stop_index_i]
        print('[DEBUG] Cycle {}: Start @ {}, {}, Stop @ {}, {}'
              ''.format(i_cycle, vec_x_i[0], vec_y_i[0], vec_x_i[-1], vec_y_i[-1]))
        # find local minima
        min_index_i = numpy.argmin(vec_y_i)
        print('[DEBUG] {}-th Local minimum: X = {}, Y = {} @ index = {} ... total index = {}'
              ''.format(i_cycle + 1, vec_x_i[min_index_i], vec_y_i[min_index_i], min_index_i,
                        start_index_i + min_index_i))
        # store the result
        local_minima_indexes[i_cycle + 1] = start_index_i + min_index_i
    # END-FOR

    # add the first and last local minimum as the cycle starts and ends at lower temperature
    cycle_indexes_size = local_minima_indexes[2] - local_minima_indexes[1]
    if trust_start_stop:
        # user-supplied x_start/x_stop are taken as the true first/last boundaries
        start_cycle_index = numpy.searchsorted(
            raw_vec_times[0:local_maxima_indexes[0]], x_start, 'right')
        local_minima_indexes[0] = start_cycle_index
        end_cycle_index = numpy.searchsorted(
            raw_vec_times[local_maxima_indexes[-1]:], x_stop, 'left')
        local_minima_indexes[
            -1] = end_cycle_index + local_maxima_indexes[-1]
    else:
        # use the 1st (i=1) local minimum time to determine the start (i=0)
        minimum_1_time = raw_vec_times[local_minima_indexes[1]]
        estimated_start_time = minimum_1_time - avg_cycle_time
        start_cycle_index = numpy.searchsorted(
            raw_vec_times[(
                local_minima_indexes[1] - int(1.01 * cycle_indexes_size)):local_maxima_indexes[0]],
            estimated_start_time, 'right')
        # NOTE(review): numpy.searchsorted returns a numpy integer; isinstance(..., int)
        # may fail on Python 3 — verify this assertion ever holds in practice
        assert isinstance(start_cycle_index, int), '{}'.format(type(start_cycle_index))
        local_minima_indexes[0] = start_cycle_index + \
            (local_minima_indexes[1] - int(1.01 * cycle_indexes_size))

        # use the last local minimum (i = -1)
        print(local_minima_indexes[-1], local_minima_indexes[-2])
        estimated_stop_time = raw_vec_times[
            local_minima_indexes[-2]] + avg_cycle_time
        print('stop time: ', estimated_stop_time)
        end_cycle_index = numpy.searchsorted(
            raw_vec_times[local_maxima_indexes[-1]:(
                local_minima_indexes[-2] + int(1.01 * cycle_indexes_size))],
            estimated_stop_time, 'left')
        local_minima_indexes[
            -1] = end_cycle_index + local_maxima_indexes[-1]
    # END-IF

    # create a debug workspace holding the minima (cycle boundaries)
    minima_times_vec = raw_vec_times[local_minima_indexes]
    minima_value_vec = raw_vec_value[local_minima_indexes]
    CreateWorkspace(DataX=minima_times_vec, DataY=minima_value_vec,
                    NSpec=1, OutputWorkspace='debug_minima')

    return local_minima_indexes, local_maxima_indexes