def __init__(self, pv_names, readback_pv_names=None, tolerances=None, timeout=None):
    """
    Initialize the write group.
    :param pv_names: PV names (or name, list or single string) to connect to.
    :param readback_pv_names: PV names (or name, list or single string) of readback PVs to connect to.
    :param tolerances: Tolerances to be used for set_and_match. You can also specify them on the set_and_match
    :param timeout: Timeout to reach the destination.
    """
    self.pv_names = convert_to_list(pv_names)
    self.pvs = [self.connect(name) for name in self.pv_names]

    # When no dedicated readback PVs are given, read back through the write PVs.
    if readback_pv_names:
        self.readback_pv_name = convert_to_list(readback_pv_names)
        self.readback_pvs = [self.connect(name) for name in self.readback_pv_name]
    else:
        self.readback_pv_name = self.pv_names
        self.readback_pvs = self.pvs

    self.tolerances = self._setup_tolerances(tolerances)
    # A zero (or None) timeout falls back to the class default.
    self.timeout = timeout if timeout else self.default_timeout

    # All per-PV lists must contain the same number of entries.
    validate_lists_length(self.pvs, self.readback_pvs, self.tolerances)

    # The timeout must be numeric.
    if not isinstance(self.timeout, (int, float)):
        raise ValueError("Timeout must be int or float, but %s was provided." % self.timeout)
def __init__(self, pv_names):
    """
    Initialize the group.
    :param pv_names: PV names (or name, list or single string) to connect to.
    """
    self.pv_names = convert_to_list(pv_names)
    # Open one connection per PV name, in order.
    self.pvs = []
    for name in self.pv_names:
        self.pvs.append(self.connect(name))
def set_and_match(self, values, tolerances=None, timeout=None):
    """
    Set the value and wait for the PV to reach it, within tolerance.
    :param values: Values to set (Must match the number of PVs in this group)
    :param tolerances: Tolerances for each PV (Must match the number of PVs in this group)
    :param timeout: Timeout, single value, to wait until the value is reached.
    :raise ValueError if any position cannot be reached.
    """
    values = convert_to_list(values)
    if not tolerances:
        tolerances = self.tolerances
    else:
        # We do not allow tolerances to be less than the default tolerance.
        tolerances = self._setup_tolerances(tolerances)
    if not timeout:
        timeout = self.timeout

    # Verify if all provided lists are of same size.
    validate_lists_length(self.pvs, values, tolerances)

    # Check if timeout is int or float.
    if not isinstance(timeout, (int, float)):
        raise ValueError("Timeout must be int or float, but %s was provided." % timeout)

    # Write all the PV values.
    for pv, value in zip(self.pvs, values):
        pv.put(value)

    # Boolean array to represent which PVs have reached their target values.
    within_tolerance = [False] * len(self.pvs)
    initial_timestamp = time.time()

    # Read values until all PVs have reached the desired value or time has run out.
    while (not all(within_tolerance)) and (time.time() - initial_timestamp < timeout):
        # Get only the PVs that have not yet reached the final position.
        for index, pv, tolerance in ((index, pv, tolerance)
                                     for index, pv, tolerance, values_reached
                                     in zip(count(), self.readback_pvs, tolerances, within_tolerance)
                                     if not values_reached):
            current_value = pv.get()
            expected_value = values[index]

            if compare_channel_value(current_value, expected_value, tolerance):
                within_tolerance[index] = True

        time.sleep(self.default_get_sleep)

    if not all(within_tolerance):
        error_message = ""

        # Get the indexes that did not reach the supposed values.
        for index in [index for index, reached_value in enumerate(within_tolerance) if not reached_value]:
            expected_value = values[index]
            pv_name = self.pv_names[index]
            tolerance = tolerances[index]

            error_message += "Cannot achieve value %s, on PV %s, with tolerance %s.\n" % \
                             (expected_value, pv_name, tolerance)

        raise ValueError(error_message)
def get_positioner(self):
    """
    Generate a positioner for the provided dimensions.
    :return: Positioner object.
    """
    # Snapshot the current readback values - needed for additive and series scans.
    initial_positions = self.epics_dal.get_group(READ_GROUP).read()

    positioners = []
    readback_offset = 0

    for dimension in self.dimensions:
        additive = bool(dimension.get("Additive", 0))
        series = bool(dimension.get("Series", 0))
        n_readbacks = len(dimension["KnobReadback"])

        # Both relative (additive) and series scans need the starting positions
        # of the knobs belonging to this dimension.
        if additive or series:
            offsets = convert_to_list(
                initial_positions[readback_offset:readback_offset + n_readbacks])
        else:
            offsets = None

        if series:
            # Series scan: step each axis separately, starting from its initial value.
            positions = convert_to_list(dimension["ScanValues"])
            positioners.append(SerialPositioner(positions,
                                                initial_positions=offsets,
                                                offsets=offsets if additive else None))
        else:
            # Line scan: move all knobs of this dimension together.
            positions = convert_to_position_list(convert_to_list(dimension["KnobExpanded"]))
            positioners.append(VectorPositioner(positions, offsets=offsets))

        # Advance past the readbacks consumed by this dimension.
        readback_offset += n_readbacks

    # Combine the per-dimension positioners into a single compound positioner.
    return CompoundPositioner(positioners)
def __init__(self, start, end, n_steps=None, step_size=None, passes=1, offsets=None):
    """
    Initialize a linear positioner.
    :param start: Start position (single value or list, one per axis).
    :param end: End position (single value or list, one per axis).
    :param n_steps: Number of steps, shared by all axes. Mutually exclusive with step_size.
    :param step_size: Step size per axis; every axis must yield the same number of steps.
    :param passes: Number of times to repeat the scan.
    :param offsets: Offsets added to start and end positions (relative scan).
    :raise ValueError if the per-axis step sizes produce different step counts.
    """
    self.start = convert_to_list(start)
    self.end = convert_to_list(end)
    self.n_steps = n_steps
    self.step_size = convert_to_list(step_size)
    self.passes = passes
    self.offsets = convert_to_list(offsets)
    self._validate_parameters()

    # Fix the offsets if provided.
    if self.offsets:
        self.start = [offset + original_value
                      for original_value, offset in zip(self.start, self.offsets)]
        self.end = [offset + original_value
                    for original_value, offset in zip(self.end, self.offsets)]

    # Number of steps case.
    if self.n_steps:
        self.step_size = [(end - start) / self.n_steps
                          for start, end in zip(self.start, self.end)]
    # Step size case.
    elif self.step_size:
        n_steps_per_axis = [math.floor((end - start) / step_size)
                            for start, end, step_size
                            in zip(self.start, self.end, self.step_size)]

        # Verify that all axis do the same number of steps.
        if not all(x == n_steps_per_axis[0] for x in n_steps_per_axis):
            # BUG FIX: the original format string contained a bare '%' ("end % pair.")
            # with three arguments for two placeholders, so the formatting itself
            # raised instead of producing this error message.
            raise ValueError("The step sizes %s must give the same number of steps for each start %s "
                             "and end %s pair." % (self.step_size, self.start, self.end))

        # All the elements in n_steps_per_axis must be the same anyway.
        self.n_steps = n_steps_per_axis[0]
def write(self, values):
    """
    Write the values to the provided functions.
    :param values: Values to write.
    """
    # Pair each value with its function proxy, in order.
    for func, value in zip(self.functions, convert_to_list(values)):
        func.call_function(value)
def walk_positioner(index, output_positions):
    # Depth-first expansion over the positioners: once every positioner has
    # contributed a value, emit a copy of the accumulated position vector.
    if index == self.n_positioners:
        yield copy(output_positions)
        return

    for current_positions in self.positioners[index].get_generator():
        yield from walk_positioner(index + 1,
                                   output_positions + convert_to_list(current_positions))
def write_data(positions):
    # Split the flat position vector into PV-backed and function-backed targets,
    # preserving the original writables order.
    positions = convert_to_list(positions)
    pv_values = []
    function_values = []
    for value, source in zip(positions, writables_order):
        if source == EPICS_PV:
            pv_values.append(value)
        elif source == FUNCTION_VALUE:
            function_values.append(value)

    if epics_writer:
        epics_writer.set_and_match(pv_values)
    if function_writer:
        function_writer.write(function_values)
def get_generator(self):
    """
    Generate positions for a serial (one-axis-at-a-time) scan.
    For each pass and each axis, yields one complete position vector per step,
    with all other axes held at their initial positions.
    """
    for _ in range(self.passes):
        # For each axis.
        for axis_index in range(self.n_axis):
            current_state = copy(self.initial_positions)
            # Hoisted out of the inner loop: the conversion result is invariant
            # per axis (the original converted on every step).
            axis_positions = convert_to_list(self.positions[axis_index])
            for axis_position_index in range(len(axis_positions)):
                current_state[axis_index] = axis_positions[axis_position_index]
                yield copy(current_state)
def __init__(self, start, end, n_steps=None, step_size=None, passes=1, offsets=None):
    """
    Initialize an area positioner.
    :param start: Start positions, one per axis.
    :param end: End positions, one per axis.
    :param n_steps: Number of steps per axis. Mutually exclusive with step_size.
    :param step_size: Step size per axis. Mutually exclusive with n_steps.
    :param passes: Number of passes.
    :param offsets: Optional offsets added to both start and end (relative scan).
    """
    self.start = convert_to_list(start)
    self.end = convert_to_list(end)
    self.n_steps = convert_to_list(n_steps)
    self.step_size = convert_to_list(step_size)
    self.passes = passes
    self.offsets = convert_to_list(offsets)
    self._validate_parameters()

    # Number of axes to scan over.
    self.n_axis = len(self.start)

    # Shift the scan range when offsets are provided.
    if self.offsets:
        self.start = [base + shift for base, shift in zip(self.start, self.offsets)]
        self.end = [base + shift for base, shift in zip(self.end, self.offsets)]

    if self.n_steps:
        # Derive the per-axis step size from the requested number of steps.
        self.step_size = [(stop - begin) / steps
                          for begin, stop, steps in zip(self.start, self.end, self.n_steps)]
    elif self.step_size:
        # Derive the per-axis number of steps from the requested step size.
        self.n_steps = [math.floor((stop - begin) / size)
                        for begin, stop, size in zip(self.start, self.end, self.step_size)]
def _setup_tolerances(self, tolerances):
    """
    Construct the list of tolerances, one per PV.
    No tolerance can be smaller than the configured minimum.
    :param tolerances: Input tolerances (single value, list, or None).
    :return: List of tolerances clamped up to the minimum value, if needed.
    """
    tolerances = convert_to_list(tolerances)
    # An empty/missing input means: use the default tolerance for every PV.
    if not tolerances:
        tolerances = [config.max_float_tolerance] * len(self.pvs)

    # Clamp each tolerance up to the configured floor.
    return [max(config.max_float_tolerance, value) for value in tolerances]
def __init__(self, properties, conditions=None, host=None, port=None, filter_function=None):
    """
    Create the bsread group read interface.
    :param properties: List of PVs to read for processing.
    :param conditions: List of PVs to read as conditions.
    :param host: Stream host (stored on the instance; see NOTE below).
    :param port: Stream port (stored on the instance; see NOTE below).
    :param filter_function: Filter the BS stream with a custom function.
    """
    self.host = host
    self.port = port
    self.properties = convert_to_list(properties)
    self.conditions = convert_to_list(conditions)
    self.filter = filter_function

    # Cache of the last received message and its timestamp.
    self._message_cache = None
    self._message_cache_timestamp = None

    # NOTE(review): the host/port arguments are stored but the connection below
    # is opened with the configured defaults - confirm whether _connect_bsread
    # is expected to use self.host/self.port instead.
    self._connect_bsread(config.bs_default_host, config.bs_default_port)
def _convert_steps_parameter(steps):
    """
    Split a 'steps' specification into (n_steps, step_size).
    Floats (single value or list) are interpreted as step sizes;
    ints (single value or list) as the number of steps.
    :param steps: Step specification - int, float, or a list of either.
    :return: Tuple (n_steps, step_size); the unused entry is None.
    """
    n_steps = None
    step_size = None

    steps_list = convert_to_list(steps)

    # If steps is a float or a list of floats, then these are step sizes.
    if isinstance(steps_list[0], float):
        step_size = steps_list
    # If steps is an int or a list of ints, this is the number of steps.
    # BUG FIX: the original checked isinstance(steps, int) on the raw argument,
    # so a list of ints fell through and returned (None, None).
    elif isinstance(steps_list[0], int):
        n_steps = steps

    return n_steps, step_size
def _generate_scan_parameters(relative, writables, latency):
    """
    Prepare offsets, finalization actions, and settings for a scan.
    For a relative scan the current writable positions are captured as offsets,
    and an action is scheduled to restore them at the end of the scan.
    """
    offsets = None
    finalization_action = []

    if relative:
        pv_names = [writable.pv_name for writable in convert_to_list(writables) or []]
        reader = EPICS_READER(pv_names)
        offsets = reader.read()
        reader.close()
        # Restore the pre-scan state once the scan finishes.
        finalization_action.append(action_restore(writables))

    settings = scan_settings(settling_time=latency)
    return offsets, finalization_action, settings
def action_restore(writables):
    """
    Restore the initial state of the writable PVs.
    :return: Callable that writes the captured initial values back.
    """
    writables = convert_input(convert_to_list(writables))

    # Collect the per-writable connection parameters.
    pv_names = []
    readback_pv_names = []
    tolerances = []
    for pv in writables:
        pv_names.append(pv.pv_name)
        readback_pv_names.append(pv.readback_pv_name)
        tolerances.append(pv.tolerance)

    # Capture the initial values now, at action-creation time.
    reader = EPICS_READER(pv_names)
    initial_values = reader.read()
    reader.close()

    def execute():
        # Write the captured values back and wait until they are reached.
        writer = EPICS_WRITER(pv_names, readback_pv_names, tolerances)
        writer.set_and_match(initial_values)
        writer.close()

    return execute
def scanner(positioner, readables, writables=None, conditions=None, before_read=None, after_read=None, initialization=None, finalization=None, settings=None, data_processor=None, before_move=None, after_move=None):
    """
    Assemble a Scanner from the provided positioner, readables, writables,
    conditions, hooks and settings.
    :param positioner: Positioner providing the scan positions.
    :param readables: Values to read at each position (BS properties, EPICS PVs, functions).
    :param writables: Values to set at each position (EPICS PVs, functions).
    :param conditions: Conditions validated at each position.
    :param before_read: Action(s) executed before each readout.
    :param after_read: Action(s) executed after each readout.
    :param initialization: Action(s) executed before moving to the first position.
    :param finalization: Action(s) executed after the last acquisition AND on error.
    :param settings: Scan settings; defaults to scan_settings().
    :param data_processor: Data processor; defaults to DATA_PROCESSOR().
    :param before_move: Action(s) executed before each move.
    :param after_move: Action(s) executed after each move.
    :return: Configured Scanner instance.
    :raise ValueError on invalid positioner/settings combination or unknown
        readable/condition types.
    """
    # Allow a list or a single value to be passed. Initialize None values.
    writables = convert_input(convert_to_list(writables) or [])
    readables = convert_input(convert_to_list(readables) or [])
    conditions = convert_conditions(convert_to_list(conditions) or [])
    before_read = convert_to_list(before_read) or []
    after_read = convert_to_list(after_read) or []
    before_move = convert_to_list(before_move) or []
    after_move = convert_to_list(after_move) or []
    initialization = convert_to_list(initialization) or []
    finalization = convert_to_list(finalization) or []
    settings = settings or scan_settings()

    # TODO: Ugly. The scanner should not depend on a particular positioner implementation.
    if isinstance(positioner, BsreadPositioner) and settings.n_measurements > 1:
        raise ValueError(
            "When using BsreadPositioner the maximum number of n_measurements = 1."
        )

    # Set up the data access layers (bsread, EPICS, function proxies).
    bs_reader = _initialize_bs_dal(readables, conditions,
                                   settings.bs_read_filter, positioner)
    epics_writer, epics_pv_reader, epics_condition_reader = _initialize_epics_dal(
        writables, readables, conditions, settings)
    function_writer, function_reader, function_condition = _initialize_function_dal(
        writables, readables, conditions)

    # Order of value sources, needed to route each position to the right writer.
    writables_order = [type(writable) for writable in writables]

    # Write function needs to merge PV and function proxy data.
    def write_data(positions):
        positions = convert_to_list(positions)
        pv_values = [
            x for x, source in zip(positions, writables_order)
            if source == EPICS_PV
        ]
        function_values = [
            x for x, source in zip(positions, writables_order)
            if source == FUNCTION_VALUE
        ]

        if epics_writer:
            epics_writer.set_and_match(pv_values)
        if function_writer:
            function_writer.write(function_values)

    # Order of value sources, needed to reconstruct the correct order of the result.
    readables_order = [type(readable) for readable in readables]

    # Read function needs to merge BS, PV, and function proxy data.
    def read_data(current_position_index, retry=False):
        _logger.debug("Reading data for position index %s." % current_position_index)

        bs_values = iter(
            bs_reader.read(current_position_index, retry) if bs_reader else [])
        epics_values = iter(
            epics_pv_reader.read(current_position_index) if epics_pv_reader else [])
        function_values = iter(
            function_reader.read(current_position_index) if function_reader else [])

        # Interleave the values correctly.
        result = []
        for source in readables_order:
            if source == BS_PROPERTY:
                next_result = next(bs_values)
            elif source == EPICS_PV:
                next_result = next(epics_values)
            elif source == FUNCTION_VALUE:
                next_result = next(function_values)
            else:
                raise ValueError("Unknown type of readable %s used." % source)

            # We flatten the result, whenever possible.
            if isinstance(next_result, list) and source != FUNCTION_VALUE:
                result.extend(next_result)
            else:
                result.append(next_result)

        return result

    # Order of value sources, needed to reconstruct the correct order of the result.
    conditions_order = [type(condition) for condition in conditions]

    # Validate function needs to validate both BS, PV, and function proxy data.
    def validate_data(current_position_index, data):
        _logger.debug("Reading data for position index %s." % current_position_index)

        bs_values = iter(
            bs_reader.read_cached_conditions() if bs_reader else [])
        epics_values = iter(
            epics_condition_reader.read(current_position_index)
            if epics_condition_reader else [])
        function_values = iter(
            function_condition.read(current_position_index)
            if function_condition else [])

        for index, source in enumerate(conditions_order):
            if source == BS_CONDITION:
                value = next(bs_values)
            elif source == EPICS_CONDITION:
                value = next(epics_values)
            elif source == FUNCTION_CONDITION:
                value = next(function_values)
            else:
                raise ValueError("Unknown type of condition %s used." % source)

            value_valid = False
            # Function conditions are self contained.
            if source == FUNCTION_CONDITION:
                if value:
                    value_valid = True
            else:
                expected_value = conditions[index].value
                tolerance = conditions[index].tolerance
                operation = conditions[index].operation
                if compare_channel_value(value, expected_value, tolerance, operation):
                    value_valid = True

            if not value_valid:
                # Retry conditions report failure to the caller instead of raising.
                if conditions[index].action == ConditionAction.Retry:
                    return False

                if source == FUNCTION_CONDITION:
                    raise ValueError("Function condition %s returned False."
                                     % conditions[index].identifier)
                else:
                    raise ValueError(
                        "Condition %s failed, expected value %s, actual value %s, "
                        "tolerance %s, operation %s." %
                        (conditions[index].identifier, conditions[index].value,
                         value, conditions[index].tolerance,
                         conditions[index].operation))

        return True

    if not data_processor:
        data_processor = DATA_PROCESSOR()

    # Before acquisition hook.
    before_measurement_executor = None
    if before_read:
        before_measurement_executor = ACTION_EXECUTOR(before_read).execute

    # After acquisition hook.
    after_measurement_executor = None
    if after_read:
        after_measurement_executor = ACTION_EXECUTOR(after_read).execute

    # Executor before each move.
    before_move_executor = None
    if before_move:
        before_move_executor = ACTION_EXECUTOR(before_move).execute

    # Executor after each move.
    after_move_executor = None
    if after_move:
        after_move_executor = ACTION_EXECUTOR(after_move).execute

    # Initialization (before move to first position) hook.
    initialization_executor = None
    if initialization:
        initialization_executor = ACTION_EXECUTOR(initialization).execute

    # Finalization (after last acquisition AND on error) hook.
    finalization_executor = None
    if finalization:
        finalization_executor = ACTION_EXECUTOR(finalization).execute

    scanner = Scanner(positioner=positioner,
                      data_processor=data_processor,
                      reader=read_data,
                      writer=write_data,
                      before_measurement_executor=before_measurement_executor,
                      after_measurement_executor=after_measurement_executor,
                      initialization_executor=initialization_executor,
                      finalization_executor=finalization_executor,
                      data_validator=validate_data,
                      settings=settings,
                      before_move_executor=before_move_executor,
                      after_move_executor=after_move_executor)

    return scanner
def __init__(self, functions):
    """
    Initialize the function dal.
    :param functions: List (or single item) of FUNCTION_VALUE type.
    """
    # Normalize to a list, so a single function wrapper is also accepted.
    self.functions = convert_to_list(functions)
def vscan(writables, readables, vector, line=False, latency=0.0, relative=False, passes=1, zigzag=False, before_read=None, after_read=None, title=None):
    """Vector Scan: positioners change following values provided in a vector.

    Args:
        writables(list of Writable): Positioners set on each step.
        readables(list of Readable): Sensors to be sampled on each step.
        vector(list of list of float): table of positioner values.
        line (bool, optional): if true, process as line scan (1d)
        relative (bool, optional): if true, start and end positions are relative to
            current at start of the scan
        latency(float, optional): settling time for each step before readout,
            defaults to 0.0.
        passes(int, optional): number of passes
        zigzag(bool, optional): if true writables invert direction on each pass.
        before_read (function, optional): callback on each step, before each readout.
        after_read (function, optional): callback on each step, after each readout.
        title(str, optional): plotting window name.

    Returns:
        ScanResult object.
    """
    offsets, finalization_actions, settings = _generate_scan_parameters(
        relative, writables, latency)

    # The compound positioner does not allow you to do zigzag positioning.
    if not line and zigzag:
        raise ValueError("Area vector scan cannot use zigzag positioning.")

    if zigzag:
        positioner_class = ZigZagVectorPositioner
    else:
        positioner_class = VectorPositioner

    # If the vector is treated as a line scan, move all motors to the next position at the same time.
    if line:
        positioner = positioner_class(positions=vector, passes=passes, offsets=offsets)
    # The vector is treated as an area scan. Move motors one by one, covering all positions.
    else:
        vector = convert_to_list(vector)
        if not all(isinstance(x, list) for x in vector):
            raise ValueError(
                "In case of area scan, a list of lists is required for a vector."
            )

        positioner = CompoundPositioner([
            VectorPositioner(positions=x, passes=passes, offsets=offsets)
            for x in vector
        ])

    result = scan(positioner, readables, writables,
                  before_read=before_read, after_read=after_read,
                  settings=settings, finalization=finalization_actions)
    return result