def __init__(
    self,
    config_dir: typing.Union[str, pathlib.Path, None] = None,
    initial_state: salobj.State = salobj.State.STANDBY,
    override: str = "",
    simulation_mode: int = 0,
) -> None:
    """Construct the ATMonochromator CSC.

    Parameters
    ----------
    config_dir : `str`, `pathlib.Path` or `None`, optional
        Directory of configuration files; forwarded to the base class.
    initial_state : `salobj.State`, optional
        Initial summary state; defaults to STANDBY.
    override : `str`, optional
        Configuration override; forwarded to the base class.
    simulation_mode : `int`, optional
        Simulation mode; forwarded to the base class.
    """
    super().__init__(
        name="ATMonochromator",
        index=0,
        config_schema=CONFIG_SCHEMA,
        config_dir=config_dir,
        initial_state=initial_state,
        override=override,
        simulation_mode=simulation_mode,
    )
    # Model is created after super().__init__ so that self.log exists.
    self.model = Model(self.log)
    self.want_connection = False
    # Pre-completed futures so these tasks can be awaited or cancelled
    # unconditionally before the real tasks are started.
    self.health_monitor_task = utils.make_done_future()
    # Mock controller; presumably created only in simulation mode
    # — TODO confirm against the rest of the class.
    self._mock_ctrl: typing.Optional[MockController] = None
    self.connect_task = utils.make_done_future()
def __init__(
    self,
    *,
    log,
    ConfigClass,
    TelemetryClass,
    host,
    command_port,
    telemetry_port,
    connect_callback,
    config_callback,
    telemetry_callback,
    connect_timeout=10,
):
    """Construct the base mock controller and start connecting.

    Parameters
    ----------
    log : `logging.Logger`
        Parent logger; a "BaseMockController" child logger is used.
    ConfigClass, TelemetryClass
        Struct-like classes for the config and telemetry payloads.
    host : `str`
        Host to connect to.
    command_port, telemetry_port : `int`
        TCP ports for the command and telemetry streams.
    connect_callback, config_callback, telemetry_callback : callable
        Callbacks invoked on connection-state change, on first/updated
        config, and on each telemetry read, respectively.
    connect_timeout : `float`, optional
        Connection timeout (seconds); default 10.

    Notes
    -----
    The constructor schedules ``self.connect()`` as an asyncio task,
    so it must be called with a running event loop.
    """
    self.log = log.getChild("BaseMockController")
    self.header = structs.Header()
    self.config = ConfigClass()
    self.telemetry = TelemetryClass()
    self.host = host
    self.command_port = command_port
    self.telemetry_port = telemetry_port
    self.connect_callback = connect_callback
    self.config_callback = config_callback
    self.telemetry_callback = telemetry_callback
    self.connect_timeout = connect_timeout
    # Resource lock for writing a command to the command stream
    # and waiting for acknowledgement.
    self._command_lock = asyncio.Lock()
    self.command_reader = None
    self.command_writer = None  # not written
    self.telemetry_reader = None  # not read
    self.telemetry_writer = None
    self.should_be_connected = False
    # The state of (command_connected, telemetry_connected)
    # the last time connect_callback was called.
    self._was_connected = (False, False)
    # Hold on to the last command, in order to increment the command
    # counter with correct wraparound.
    # An index generator would also work, but this avoids duplicating
    # information about the type of the command counter field.
    # Start from -1 so that the first command has counter=0.
    self._last_command = structs.Command()
    self._last_command.counter -= 1
    # Task set to None when config is first seen and handled by
    # config_callback, or the exception if config_callback raises.
    self.configured_task = asyncio.Future()
    # Task used by next_telemetry to detect when the next telemetry
    # is read.
    self._telemetry_task = asyncio.Future()
    # Task used to wait for a command acknowledgement
    self._read_command_status_task = utils.make_done_future()
    self._read_telemetry_and_config_task = utils.make_done_future()
    # Start connecting immediately; must be last so all attributes
    # exist before connect() runs.
    self.connect_task = asyncio.create_task(self.connect())
def __init__(self, initial_state=salobj.State.STANDBY):
    """Construct the ATPneumatics simulator CSC.

    Parameters
    ----------
    initial_state : `salobj.State`, optional
        Initial summary state; defaults to STANDBY.
        The CSC always runs with simulation_mode=1.
    """
    super().__init__(
        name="ATPneumatics", index=0, initial_state=initial_state, simulation_mode=1
    )
    self.telemetry_interval = 1.0
    """Interval between telemetry updates (sec)"""
    # Pre-completed futures so mirror-cover and cell-vent motion tasks
    # can be awaited or cancelled unconditionally.
    self._closeM1CoversTask = utils.make_done_future()
    self._openM1CoversTask = utils.make_done_future()
    self._closeCellVentsTask = utils.make_done_future()
    self._openCellVentsTask = utils.make_done_future()
    # Set up configuration and initial device state.
    self.configure()
    self.initialize()
def __init__( self, config_dir=None, initial_state=salobj.State.STANDBY, override="", ): super().__init__( name="MTDomeTrajectory", config_schema=CONFIG_SCHEMA, config_dir=config_dir, index=None, initial_state=initial_state, override=override, simulation_mode=0, ) # Telescope target, from the MTMount target event; # an ElevationAzimuth; None before a target is seen. self.telescope_target = None # Next telescope target, eventually from the scheduler; # an ElevationAzimuth; None before the next target is seen; self.next_telescope_target = None # Tasks that start dome azimuth and elevation motion # and wait for the motionState and target events # that indicate the motion has started. # While one of these is running that axis will not be commanded. # This avoids the problem of new telescope target events # causing unwanted motion when the dome has been commanded # but has not yet had a chance to report the fact. self.move_dome_azimuth_task = utils.make_done_future() self.move_dome_elevation_task = utils.make_done_future() # Task that is set to (moved_elevation, moved_azimuth) # whenever the follow_target method runs. self.follow_task = asyncio.Future() self.mtmount_remote = salobj.Remote(domain=self.domain, name="MTMount", include=["target"]) self.dome_remote = salobj.Remote( domain=self.domain, name="MTDome", include=["azMotion", "azTarget", "elMotion", "elTarget"], ) self.mtmount_remote.evt_target.callback = self.update_mtmount_target
def __init__(
    self,
    index,
    config_dir=None,
    initial_state=salobj.State.STANDBY,
    override="",
    simulation_mode=0,
):
    """Construct the FiberSpectrograph CSC.

    Parameters
    ----------
    index : `int` or `constants.SalIndex`
        SAL index; selects which spectrograph instrument this CSC runs.
    config_dir : `str` or `None`, optional
        Directory of configuration files; forwarded to the base class.
    initial_state : `salobj.State`, optional
        Initial summary state; defaults to STANDBY.
    override : `str`, optional
        Configuration override; forwarded to the base class.
    simulation_mode : `int`, optional
        Simulation mode; forwarded to the base class.
    """
    # Validate the index and use it to look up per-instrument constants.
    index = constants.SalIndex(index)
    self._simulator = None
    self.device = None
    self.serial_number = constants.SERIAL_NUMBERS[index]
    # Short name of instrument
    self.band_name = constants.BAND_NAMES[index]
    self.generator_name = f"fiberSpec{self.band_name}"
    self.s3bucket_name = None  # Set by `configure`.
    self.s3bucket = None  # Set by `handle_summary_state`.
    self.data_manager = dataManager.DataManager(
        instrument=f"FiberSpectrograph.{self.band_name}",
        origin=type(self).__name__,
    )
    self.telemetry_loop_task = utils.make_done_future()
    self.telemetry_interval = 10  # seconds between telemetry output
    # Call super().__init__ last so that all attributes exist before
    # any state-transition handling can run.
    super().__init__(
        name="FiberSpectrograph",
        index=index,
        config_schema=CONFIG_SCHEMA,
        config_dir=config_dir,
        initial_state=initial_state,
        override=override,
        simulation_mode=simulation_mode,
    )
def __init__(self, index):
    """Construct the topic writer for the given Test SAL index.

    The setArrays, setScalars and wait commands are not supported;
    their ``do_`` handlers are replaced with a rejection handler.
    """
    print(f"TopicWriter: starting with index={index}")
    # Install the rejection handler for each unsupported command.
    for cmd in ("setArrays", "setScalars", "wait"):
        handler_name = f"do_{cmd}"
        setattr(self, handler_name, self._unsupported_command)
    # Done future so the write task can be awaited or cancelled
    # before the real task is started.
    self.write_task = utils.make_done_future()
    self.is_log_level = False
    super().__init__(name="Test", index=index)
def __init__(
    self,
    name: str,
    index: typing.Optional[int],
    config_schema: typing.Dict[str, typing.Any],
    config_dir: typing.Union[str, pathlib.Path, None] = None,
    initial_state: State = State.STANDBY,
    override: str = "",
    simulation_mode: int = 0,
) -> None:
    """Construct a configurable CSC.

    Parameters
    ----------
    name : `str`
        SAL component name.
    index : `int` or `None`
        SAL index; None for a non-indexed component.
    config_schema : `dict`
        JSON schema for configuration; its title must end with
        " v<version>", which provides the schema version.
    config_dir : `str`, `pathlib.Path` or `None`, optional
        Directory of configuration files; if None a default is computed.
    initial_state : `State`, optional
        Initial summary state; defaults to STANDBY.
    override : `str`, optional
        Configuration override; forwarded to the base class.
    simulation_mode : `int`, optional
        Simulation mode; forwarded to the base class.

    Raises
    ------
    RuntimeError
        If environment variable LSST_SITE is not defined.
    ValueError
        If the schema is invalid (including a malformed title),
        or config_dir is not an existing directory.
    """
    self.site = os.environ.get("LSST_SITE")
    if self.site is None:
        raise RuntimeError("Environment variable $LSST_SITE not defined.")
    # Validate the schema and extract its version from the title.
    # Any failure here is re-raised as ValueError.
    try:
        assert config_schema is not None  # Make mypy happy
        self.config_validator = StandardValidator(schema=config_schema)
        title = config_schema["title"]
        name_version = title.rsplit(" ", 1)
        if len(name_version) != 2 or not name_version[1].startswith("v"):
            raise ValueError(
                f"Schema title {title!r} must end with ' v<version>'")
        self.schema_version = name_version[1]
    except Exception as e:
        raise ValueError(f"Schema {config_schema} invalid: {e!r}") from e
    if config_dir is None:
        config_dir = self._get_default_config_dir(name)
    else:
        config_dir = pathlib.Path(config_dir)
        if not config_dir.is_dir():
            raise ValueError(
                f"config_dir={config_dir} does not exist or is not a directory"
            )
    self.config_dir = config_dir
    # Interval between reading the config dir (seconds)
    self.read_config_dir_interval = 1
    self.read_config_dir_task = utils.make_done_future()
    super().__init__(
        name=name,
        index=index,
        initial_state=initial_state,
        override=override,
        simulation_mode=simulation_mode,
    )
    # Set static fields of the generic configuration events.
    # Neither event is complete, so don't put the information yet.
    for evt in (
        self.evt_configurationsAvailable,  # type: ignore
        self.evt_configurationApplied,  # type: ignore
    ):
        evt.set(  # type: ignore
            schemaVersion=self.schema_version,
            url=self.config_dir.as_uri())
def __init__(
    self,
    initial_state,
    initial_elevation=0,
    elevation_velocity=3,
    azimuth_velocity=3,
    azimuth_acceleration=1,
):
    """Construct the MTDome simulator CSC.

    Parameters
    ----------
    initial_state : `salobj.State`
        Initial summary state.
    initial_elevation : `float`, optional
        Initial elevation position (deg); default 0.
    elevation_velocity : `float`, optional
        Elevation speed (deg/sec); default 3.
    azimuth_velocity : `float`, optional
        Maximum azimuth velocity (deg/sec); default 3.
    azimuth_acceleration : `float`, optional
        Maximum azimuth acceleration (deg/sec^2); default 1.
    """
    # Point-to-point actuator for elevation, limited to 0-90 deg.
    self.elevation_actuator = simactuators.PointToPointActuator(
        min_position=0,
        max_position=90,
        speed=elevation_velocity,
        start_position=initial_elevation,
    )
    # Tracking actuator for azimuth; dtmax_track=0 means pure slewing
    # — TODO confirm against simactuators documentation.
    self.azimuth_actuator = simactuators.CircularTrackingActuator(
        max_velocity=azimuth_velocity,
        max_acceleration=azimuth_acceleration,
        dtmax_track=0,
    )
    self.telemetry_interval = 0.2  # seconds
    # Pre-completed futures so these tasks can be awaited or cancelled
    # unconditionally before the real tasks are started.
    self.telemetry_loop_task = utils.make_done_future()
    self.elevation_done_task = utils.make_done_future()
    self.azimuth_done_task = utils.make_done_future()
    # Add do methods for unsupported commands
    for name in (
        "closeLouvers",
        "closeShutter",
        "crawlAz",
        "crawlEl",
        "exitFault",
        "openShutter",
        "park",
        "resetDrivesAz",
        "setLouvers",
        "setOperationalMode",
        "setTemperature",
        "setZeroAz",
    ):
        setattr(self, f"do_{name}", self._unsupportedCommand)
    super().__init__(name="MTDome", index=None, initial_state=initial_state)
def __init__(self, script_log_level, **kwargs):
    """Construct the ScriptQueue command-line helper.

    Parameters
    ----------
    script_log_level : `int`
        Default log level for scripts added to the queue.
    **kwargs
        Additional keyword arguments for the base class.
    """
    super().__init__(name="ScriptQueue", **kwargs)
    self.script_log_level = script_log_level
    # Extended help for the "add" command; internal line breaks were
    # lost in transit — NOTE(review): confirm formatting against the
    # original source.
    self.help_dict[
        "add"] = f"""type path config options  # add a script to the end of the queue:
• type = s or std for standard, e or ext for external
• config = @yaml_path or keyword1=value1 keyword2=value2 ...
  where yaml_path is the path to a yaml file; the .yaml suffix is optional
• options can be any of the following, in any order
  (but all option args must follow all config args):
  -location=int  # first=0, last=1, before=2, after=3
  -locationSalIndex=int
  -logLevel=int  # error=40, warning=30, info=20, debug=10; default is {script_log_level}
  -pauseCheckpoint=str  # a regex
  -stopCheckpoint=str  # a regex
• examples:
  add s auxtel/slew_telescope_icrs.py ra=10 dec=0 -location=1
  add s auxtel/slew_telescope_icrs.py @target -logLevel=10 -location=0
"""
    # Default options for the add command
    self.default_add_options = dict(
        location=Location.LAST,
        locationSalIndex=0,
        logLevel=self.script_log_level,
        pauseCheckpoint="",
        stopCheckpoint="",
    )
    self.help_dict["showSchema"] = "type path # type=s, std, e, or ext"
    self.help_dict[
        "stopScripts"] = "sal_index1 [sal_index2 [... sal_indexN]] terminate (0 or 1)"
    # Read-only remote used to relay Script log messages, state and
    # heartbeats for the currently executing script.
    self.script_remote = salobj.Remote(
        domain=self.domain,
        name="Script",
        index=0,
        readonly=True,
        include=["heartbeat", "logMessage", "state"],
    )
    self.script_remote.evt_logMessage.callback = self.script_log_message
    self.script_remote.evt_state.callback = self.script_state
    self.script_remote.evt_heartbeat.callback = self.script_heartbeat
    # Dict of "type" argument: isStandard
    self.script_type_dict = dict(
        s=True, std=True, standard=True, e=False, ext=False, external=False)
    # SAL index of script whose heartbeat is being monitored;
    # this should be the currently executing script.
    self._script_to_monitor = 0
    self.script_heartbeat_monitor_task = make_done_future()
def __init__(
    self,
    name: str,
    index: typing.Optional[int] = None,
    initial_state: State = State.STANDBY,
    override: str = "",
    simulation_mode: int = 0,
) -> None:
    """Construct a base CSC.

    Parameters
    ----------
    name : `str`
        SAL component name.
    index : `int` or `None`, optional
        SAL index; None for a non-indexed component.
    initial_state : `State` or `int`, optional
        Initial summary state; must not be FAULT. Defaults to STANDBY.
    override : `str`, optional
        Configuration override.
    simulation_mode : `int`, optional
        Simulation mode; must be in ``valid_simulation_modes``.

    Raises
    ------
    RuntimeError
        If the subclass is missing the ``version`` class variable,
        or ``valid_simulation_modes`` is None.
    ValueError
        If ``initial_state`` is FAULT or not a valid `State`,
        or ``simulation_mode`` is not valid.
    """
    # Check class variables
    if not hasattr(self, "version"):
        raise RuntimeError("Class variable `version` is required")
    if self.valid_simulation_modes is None:
        raise RuntimeError(
            "Class variable `valid_simulation_modes` cannot be None")
    # Cast initial_state from an int or State to a State,
    # and reject invalid int values with ValueError
    initial_state = State(initial_state)
    if initial_state == State.FAULT:
        raise ValueError("initial_state cannot be FAULT")
    if simulation_mode not in self.valid_simulation_modes:
        raise ValueError(
            f"simulation_mode={simulation_mode} "
            f"not in valid_simulation_modes={self.valid_simulation_modes}")
    self._override = override
    self._summary_state = State(self.default_initial_state)
    self._initial_state = initial_state
    # True while a transition to FAULT is in progress.
    self._faulting = False
    self._heartbeat_task = utils.make_done_future()
    # Interval between heartbeat events (sec)
    self.heartbeat_interval = HEARTBEAT_INTERVAL
    # Postpone assigning command callbacks until `start` is done (but call
    # assert_do_methods_present to fail early if there is a problem).
    super().__init__(name=name, index=index, do_callbacks=True)
    # Set evt_simulationMode, now that it is available.
    self.evt_simulationMode.set(mode=int(simulation_mode))  # type: ignore

    def format_version(version: typing.Optional[str]) -> str:
        # Render a possibly-missing version string as "?".
        return "?" if version is None else version

    self.evt_softwareVersions.set(  # type: ignore
        salVersion=format_version(self.salinfo.metadata.sal_version),
        xmlVersion=format_version(self.salinfo.metadata.xml_version),
        openSpliceVersion=os.environ.get("OSPL_RELEASE", "?"),
        cscVersion=getattr(self, "version", "?"),
    )
def __init__(
    self,
    config_dir=None,
    initial_state=salobj.base_csc.State.STANDBY,
    override="",
):
    """Construct the ATDomeTrajectory CSC.

    Parameters
    ----------
    config_dir : `str` or `None`, optional
        Directory of configuration files; forwarded to the base class.
    initial_state : `salobj.State`, optional
        Initial summary state; defaults to STANDBY.
    override : `str`, optional
        Configuration override; forwarded to the base class.
    """
    # Commanded dome azimuth (deg), from the ATDome azimuthCommandedState
    # event; None before the event is seen.
    self.dome_target_azimuth = None
    # Telescope target, from the ATMCS target event;
    # an ElevationAzimuth; None before a target is seen.
    self.telescope_target = None
    # Task that starts dome azimuth motion
    # and waits for the motionState and target events
    # that indicate the motion has started.
    # While running that axis will not be commanded.
    # This avoids the problem of new telescope target events
    # causing unwanted motion when the dome has been commanded
    # but has not yet had a chance to report the fact.
    self.move_dome_azimuth_task = utils.make_done_future()
    # Next telescope target, eventually from the scheduler;
    # an ElevationAzimuth; None before the next target is seen;
    self.next_telescope_target = None
    super().__init__(
        name="ATDomeTrajectory",
        config_schema=CONFIG_SCHEMA,
        config_dir=config_dir,
        index=None,
        initial_state=initial_state,
        override=override,
        simulation_mode=0,
    )
    # Remotes must be created after super().__init__,
    # which provides self.domain.
    self.atmcs_remote = salobj.Remote(
        domain=self.domain, name="ATMCS", include=["target"])
    self.dome_remote = salobj.Remote(
        domain=self.domain, name="ATDome", include=["azimuthCommandedState"])
    self.atmcs_remote.evt_target.callback = self.atmcs_target_callback
    self.dome_remote.evt_azimuthCommandedState.callback = (
        self.atdome_commanded_azimuth_state_callback)
def __init__(self, log: typing.Optional[logging.Logger]) -> None:
    """Construct the model.

    Parameters
    ----------
    log : `logging.Logger` or `None`
        Parent logger; if None a new logger named after this class is
        created, otherwise a child logger of ``log`` is used.
        (Annotation widened to Optional to match the None check below.)
    """
    self.log = (logging.getLogger(type(self).__name__)
                if log is None
                else log.getChild(type(self).__name__))
    # Timeouts and polling interval, in seconds.
    self.connection_timeout = 10.0
    self.read_timeout = 10.0
    self.move_timeout = 60.0
    self.wait_ready_sleeptime = 0.5
    self.connect_task = utils.make_done_future()
    self._reader: typing.Optional[asyncio.StreamReader] = None
    self._writer: typing.Optional[asyncio.StreamWriter] = None
    # Lock to serialize commands sent to the controller.
    self.cmd_lock = asyncio.Lock()
    self.controller_ready = False
def __init__(
    self,
    index,
    config_dir=None,
    initial_state=salobj.State.STANDBY,
    simulation_mode=0,
):
    """Construct the Electrometer CSC.

    Parameters
    ----------
    index : `int`
        SAL index of this electrometer.
    config_dir : `str` or `None`, optional
        Directory of configuration files; forwarded to the base class.
    initial_state : `salobj.State`, optional
        Initial summary state; defaults to STANDBY.
    simulation_mode : `int`, optional
        Simulation mode; forwarded to the base class.
    """
    super().__init__(
        name="Electrometer",
        index=index,
        config_schema=CONFIG_SCHEMA,
        config_dir=config_dir,
        initial_state=initial_state,
        simulation_mode=simulation_mode,
    )
    # Controller is created after super().__init__ so self.log exists.
    self.controller = controller.ElectrometerController(log=self.log)
    self.simulator = None
    self.run_event_loop = False
    # Done future so the event loop task can be awaited or cancelled
    # before the real task is started.
    self.event_loop_task = utils.make_done_future()
def __init__(
    self,
    initial_state=salobj.State.STANDBY,
    simulation_mode=0,
    start_telemetry_publishing=True,
):
    """Construct the HVAC CSC.

    Parameters
    ----------
    initial_state : `salobj.State`, optional
        Initial summary state; defaults to STANDBY.
    simulation_mode : `int`, optional
        Simulation mode; forwarded to the base class.
    start_telemetry_publishing : `bool`, optional
        If True, publish telemetry; presumably disabled only for unit
        tests — TODO confirm.
    """
    # Register config commands before super().__init__ so the base
    # class sees the do_ methods.
    self._add_config_commands()
    super().__init__(
        name="HVAC",
        index=0,
        initial_state=initial_state,
        simulation_mode=simulation_mode,
    )
    self.mqtt_client = None
    self.start_telemetry_publishing = start_telemetry_publishing
    self.telemetry_task = utils.make_done_future()
    # Keep track of the internal state of the MQTT topics. This will
    # collect all values for the duration of HVAC_STATE_TRACK_PERIOD before
    # the median is sent via SAL telemetry. The structure is
    # "topic" : {
    #     "item": InternalItemState
    # }
    # and this gets initialized in the connect method.
    self.hvac_state = None
    # The host and port to connect to.
    self.host = "hvac01.cp.lsst.org"
    self.port = 1883
    # Helper for reading the HVAC data
    self.xml = MqttInfoReader()
    # Keep track of the device indices for the device mask
    self.device_id_index = {dev_id: i for i, dev_id in enumerate(DeviceId)}
def __init__(
    self,
    *,
    salinfo: SalInfo,
    attr_name: str,
    max_history: int,
    queue_len: int = DEFAULT_QUEUE_LEN,
    filter_ackcmd: bool = True,
) -> None:
    """Construct the topic reader and register it with ``salinfo``.

    Parameters
    ----------
    salinfo : `SalInfo`
        SAL component information.
    attr_name : `str`
        Topic attribute name, e.g. "cmd_start" or "evt_summaryState".
    max_history : `int`
        Number of historical messages to keep; must be >= 0, 0 for
        volatile topics, at most 1 for an indexed component read with
        index=0, and no larger than ``queue_len``.
    queue_len : `int`, optional
        Length of the internal read queue; must be >= MIN_QUEUE_LEN.
    filter_ackcmd : `bool`, optional
        If True and this is the ackcmd reader, only read
        acknowledgements matching our origin and identity.

    Raises
    ------
    ValueError
        If ``max_history`` or ``queue_len`` is out of range.
    """
    super().__init__(salinfo=salinfo, attr_name=attr_name)
    self.isopen = True
    self._allow_multiple_callbacks = False
    if max_history < 0:
        raise ValueError(f"max_history={max_history} must be >= 0")
    if max_history > 0 and self.volatile:
        raise ValueError(
            f"max_history={max_history} must be 0 for volatile topics")
    if salinfo.indexed and salinfo.index == 0 and max_history > 1:
        raise ValueError(f"max_history={max_history} must be 0 or 1 "
                         "for an indexed component read with index=0")
    # Bug fix: this check used ``queue_len <= MIN_QUEUE_LEN``, which
    # rejected queue_len == MIN_QUEUE_LEN even though the error message
    # states queue_len must be >= MIN_QUEUE_LEN.
    if queue_len < MIN_QUEUE_LEN:
        raise ValueError(
            f"queue_len={queue_len} must be >= MIN_QUEUE_LEN={MIN_QUEUE_LEN}"
        )
    if max_history > queue_len:
        raise ValueError(
            f"max_history={max_history} must be <= queue_len={queue_len}")
    # Warn (don't fail) if DDS QoS history depth cannot honor
    # the requested amount of historical data.
    if (max_history > self.qos_set.reader_qos.history.depth
            or max_history > self.qos_set.topic_qos.durability_service.history_depth):
        warnings.warn(
            f"max_history={max_history} > history depth={self.qos_set.reader_qos.history.depth} "
            f"and/or {self.qos_set.topic_qos.durability_service.history_depth}; "
            "you will get less historical data than you asked for.",
            UserWarning,
        )
    self._max_history = int(max_history)
    self._data_queue: typing.Deque[
        type_hints.BaseMsgType] = collections.deque(maxlen=queue_len)
    self._current_data: typing.Optional[type_hints.BaseMsgType] = None
    # Task that `next` waits on.
    # Its result is set to the oldest message on the queue.
    # We do this instead of having `next` itself pop the oldest message
    # because it allows multiple callers of `next` to all get the same
    # message, and it avoids a potential race condition with `flush`.
    self._next_task = utils.make_done_future()
    self._callback: typing.Optional[CallbackType] = None
    self._callback_tasks: typing.Set[asyncio.Task] = set()
    self._callback_loop_task = utils.make_done_future()
    self.dds_queue_length_checker = QueueCapacityChecker(
        descr=f"{attr_name} DDS read queue",
        log=self.log,
        queue_len=self.qos_set.reader_qos.history.depth,
    )
    self.python_queue_length_checker = QueueCapacityChecker(
        descr=f"{attr_name} python read queue",
        log=self.log,
        queue_len=queue_len)
    # Command topics use a different a partition name than
    # all other topics, including ackcmd, and the partition name
    # is part of the publisher and subscriber.
    # This split allows us to create just one subscriber and one publisher
    # for each Controller or Remote:
    # `Controller` only needs a cmd_subscriber and data_publisher,
    # `Remote` only needs a cmd_publisher and data_subscriber.
    if attr_name.startswith("cmd_"):
        subscriber = salinfo.cmd_subscriber
    else:
        subscriber = salinfo.data_subscriber
    self._reader = subscriber.create_datareader(
        self._topic, self.qos_set.reader_qos)
    # TODO DM-26411: replace ANY_INSTANCE_STATE with ALIVE_INSTANCE_STATE
    # once the OpenSplice issue 00020647 is fixed.
    read_mask = [
        dds.DDSStateKind.NOT_READ_SAMPLE_STATE,
        dds.DDSStateKind.ANY_INSTANCE_STATE,
    ]
    queries = []
    if salinfo.index > 0:
        queries.append(f"salIndex = {salinfo.index}")
    if attr_name == "ack_ackcmd" and filter_ackcmd:
        queries += [
            f"origin = {salinfo.domain.origin}",
            f"identity = '{salinfo.identity}'",
        ]
    if queries:
        full_query = " AND ".join(queries)
        read_condition = dds.QueryCondition(self._reader, read_mask, full_query)
    else:
        read_condition = self._reader.create_readcondition(read_mask)
    self._read_condition = read_condition
    salinfo.add_reader(self)
def __init__(self, initial_state=salobj.State.STANDBY): super().__init__( name="ATMCS", index=0, initial_state=initial_state, simulation_mode=1 ) # interval between telemetry updates (sec) self._telemetry_interval = 1 # number of event updates per telemetry update self._events_per_telemetry = 10 # task that runs while the events_and_telemetry_loop runs self._events_and_telemetry_task = utils.make_done_future() # task that runs while axes are slewing to a halt from stopTracking self._stop_tracking_task = utils.make_done_future() # task that runs while axes are halting before being disabled self._disable_all_drives_task = utils.make_done_future() # Dict of M3ExitPort (the instrument port M3 points to): tuple of: # * index of self.m3_port_positions: the M3 position for this port # * M3State (state of M3 axis when pointing to this port) # * Rotator axis at this port, as an Axis enum, # or None if this port has no rotator. self._port_info_dict = { M3ExitPort.NASMYTH1: (0, M3State.NASMYTH1, Axis.NA1), M3ExitPort.NASMYTH2: (1, M3State.NASMYTH2, Axis.NA2), M3ExitPort.PORT3: (2, M3State.PORT3, None), } # Name of minimum limit switch event for each axis. self._min_lim_names = ( "elevationLimitSwitchLower", "azimuthLimitSwitchCW", "nasmyth1LimitSwitchCW", "nasmyth2LimitSwitchCW", "m3RotatorLimitSwitchCW", ) # Name of maximum limit switch event for each axis. self._max_lim_names = ( "elevationLimitSwitchUpper", "azimuthLimitSwitchCCW", "nasmyth1LimitSwitchCCW", "nasmyth2LimitSwitchCCW", "m3RotatorLimitSwitchCCW", ) # Name of "in position" event for each axis, # excluding ``allAxesInPosition``. self._in_position_names = ( "elevationInPosition", "azimuthInPosition", "nasmyth1RotatorInPosition", "nasmyth2RotatorInPosition", "m3InPosition", ) # Name of drive status events for each axis. 
self._drive_status_names = ( ("elevationDriveStatus",), ( "azimuthDrive1Status", "azimuthDrive2Status", ), ("nasmyth1DriveStatus",), ("nasmyth2DriveStatus",), ("m3DriveStatus",), ) # Name of brake events for each axis. self._brake_names = ( ("elevationBrake",), ( "azimuthBrake1", "azimuthBrake2", ), ("nasmyth1Brake",), ("nasmyth2Brake",), (), ) # Has tracking been enabled by startTracking? # This remains true until stopTracking is called or the # summary state is no longer salobj.State.Enabled, # even if some drives have been disabled by running into limits. self._tracking_enabled = False # Is this particular axis enabled? # This remains true until stopTracking is called or the # summary state is no longer salobj.State.Enabled, # or the axis runs into a limit. # Note that the brakes automatically come on/off # if the axis is disabled/enabled, respectively. self._axis_enabled = np.zeros([5], dtype=bool) # Timer to kill tracking if trackTarget doesn't arrive in time. self._kill_tracking_timer = utils.make_done_future() self.configure()
def __init__(self) -> None:
    """Initialize bookkeeping for the callback helper."""
    # Behavior flags and call counter.
    self.fail = False
    self.called = 0
    # Future/task used to coordinate with the caller; start "done".
    self._future = make_done_future()
    self._future_task: typing.Optional[asyncio.Task] = None
def __init__(
    self,
    domain: Domain,
    name: str,
    index: typing.Optional[int] = 0,
    write_only: bool = False,
) -> None:
    """Construct SAL component information.

    Parameters
    ----------
    domain : `Domain`
        DDS domain.
    name : `str`
        SAL component name.
    index : `int`, `enum.IntEnum` or `None`, optional
        SAL index; None is treated as 0 and is required (along with 0)
        for non-indexed components.
    write_only : `bool`, optional
        If True, this instance only writes topics.

    Raises
    ------
    TypeError
        If ``domain`` is not a Domain or ``index`` has the wrong type.
    RuntimeError
        If $LSST_DDS_PARTITION_PREFIX is undefined, the IDL file is
        missing, or the ackcmd topic cannot be found.
    ValueError
        If ``index`` is nonzero for a non-indexed component.
    """
    if not isinstance(domain, Domain):
        raise TypeError(
            f"domain {domain!r} must be an lsst.ts.salobj.Domain")
    if index is not None:
        if not (isinstance(index, int) or isinstance(index, enum.IntEnum)):
            raise TypeError(
                f"index {index!r} must be an integer, enum.IntEnum, or None"
            )
    self.domain = domain
    self.name = name
    self.index = 0 if index is None else index
    self.write_only = write_only
    self.identity = domain.default_identity
    self.log = logging.getLogger(self.name)
    # Cap the log level so important messages are not filtered out.
    if self.log.getEffectiveLevel() > MAX_LOG_LEVEL:
        self.log.setLevel(MAX_LOG_LEVEL)
    # Dict of SAL topic name: wait_for_historical_data succeeded
    # for each topic for which wait_for_historical_data was called.
    # This is primarily intended for unit tests.
    self.wait_history_isok: typing.Dict[str, bool] = dict()
    self.partition_prefix = os.environ.get("LSST_DDS_PARTITION_PREFIX")
    if self.partition_prefix is None:
        raise RuntimeError(
            "Environment variable $LSST_DDS_PARTITION_PREFIX not defined.")
    self.isopen = True
    self.start_called = False
    self.done_task: asyncio.Future = asyncio.Future()
    self.start_task: asyncio.Future = asyncio.Future()
    # Parse environment variable LSST_DDS_ENABLE_AUTHLIST
    # to determine whether to implement command authorization.
    # TODO DM-32379: remove this code block, including the
    # default_authorize attribute.
    authorize_str = os.environ.get("LSST_DDS_ENABLE_AUTHLIST", "0")
    if authorize_str not in ("0", "1"):
        self.log.warning(
            f"Invalid value $LSST_DDS_ENABLE_AUTHLIST={authorize_str!r}. "
            "Specify '1' to enable, '0' or undefined to disable "
            "authlist-based command authorization. Disabling.")
    self.default_authorize = authorize_str == "1"
    if self.default_authorize:
        self.log.info("Enabling authlist-based command authorization")
    else:
        self.log.info("Disabling authlist-based command authorization")
    self.authorized_users: typing.Set[str] = set()
    self.non_authorized_cscs: typing.Set[str] = set()
    # Publishers and subscribers.
    # Create at need to avoid unnecessary instances.
    # Controller needs a _cmd_publisher and _data_subscriber.
    # Remote needs a _cmd_subscriber and _data_publisher.
    self._cmd_publisher = None
    self._cmd_subscriber = None
    self._data_publisher = None
    self._data_subscriber = None
    # dict of private_seqNum: salobj.topics.CommandInfo
    self._running_cmds: typing.Dict[int, topics.CommandInfo] = dict()
    # dict of dds.ReadCondition: salobj topics.ReadTopic
    # This is needed because read conditions don't store the associated
    # data reader. When a wait on a dds.WaitSet returns a read condition
    # we use this dict to figure out which topic to read.
    self._reader_dict: typing.Dict[dds.ReadCondition, topics.ReadTopic] = dict()
    # list of salobj topics.WriteTopic
    self._writer_list: typing.List[topics.WriteTopic] = list()
    # the first RemoteCommand created should set this to
    # an lsst.ts.salobj.topics.AckCmdReader
    # and set its callback to self._ackcmd_callback
    self._ackcmd_reader: typing.Optional[topics.AckCmdReader] = None
    # the first ControllerCommand created should set this to
    # an lsst.ts.salobj.topics.AckCmdWriter
    self._ackcmd_writer: typing.Optional[topics.AckCmdWriter] = None
    # wait_timeout is a failsafe for shutdown; normally all you have to do
    # is call `close` to trigger the guard condition and stop the wait
    self._wait_timeout = dds.DDSDuration(sec=10)
    self._guardcond = dds.GuardCondition()
    self._waitset = dds.WaitSet()
    self._waitset.attach(self._guardcond)
    self._read_loop_task = utils.make_done_future()
    idl_path = domain.idl_dir / f"sal_revCoded_{self.name}.idl"
    if not idl_path.is_file():
        raise RuntimeError(
            f"Cannot find IDL file {idl_path} for name={self.name!r}")
    self.parsed_idl = ddsutil.parse_idl_file(idl_path)
    self.metadata = idl_metadata.parse_idl(name=self.name, idl_path=idl_path)
    self.parse_metadata()  # Adds self.indexed, self.revnames, etc.
    if self.index != 0 and not self.indexed:
        raise ValueError(
            f"Index={index!r} must be 0 or None; {name} is not an indexed SAL component"
        )
    if len(self.command_names) > 0:
        ackcmd_revname = self.revnames.get("ackcmd")
        if ackcmd_revname is None:
            raise RuntimeError(
                f"Could not find {self.name} topic 'ackcmd'")
        self._ackcmd_type: type_hints.AckCmdDataType = ddsutil.make_dds_topic_class(
            parsed_idl=self.parsed_idl, revname=ackcmd_revname)
    domain.add_salinfo(self)
    # Make sure the background thread terminates.
    atexit.register(self.basic_close)