def main():
    """Boot the stimulator AC power-supply OCS agent and run it.

    Registers the acquisition process (started automatically) and the
    supply control tasks, then hands control to the site runner.
    """
    txaio.start_logging(level=os.environ.get('LOGLEVEL', 'info'))

    # NOTE(review): this parser is never passed to parse_args below and
    # looks unused — confirm site_config.add_arguments() has no required
    # side effect before removing it.
    parser = site_config.add_arguments()
    args = site_config.parse_args('stmACAgent')

    agent_inst, runner = ocs_agent.init_site_agent(args)
    stm_ac_agent = stmACAgent(agent_inst)

    # Data acquisition runs continuously from startup.
    agent_inst.register_process('acq',
                                stm_ac_agent.start_acq,
                                stm_ac_agent.stop_acq,
                                startup=True)

    # On-demand control tasks.
    agent_inst.register_task('set_values', stm_ac_agent.set_values)
    agent_inst.register_task('get_values', stm_ac_agent.get_values)
    agent_inst.register_task('getACstatus', stm_ac_agent.getACstatus)
    agent_inst.register_task('rampVoltage', stm_ac_agent.rampVoltage)
    agent_inst.register_task('forceZero', stm_ac_agent.forceZero)

    runner.run(agent_inst, auto_reconnect=True)
def main():
    """Boot the Cryomech CPA compressor OCS agent and run it."""
    parser = make_parser()
    args = site_config.parse_args(agent_class='CryomechCPAAgent',
                                  parser=parser)

    print('I am in charge of device with serial number: %s' % args.serial_number)

    # Map the startup mode onto parameters for the 'init' task; any
    # other mode leaves init_params False (no automatic startup).
    init_params = {
        'init': {'auto_acquire': False},
        'acq': {'auto_acquire': True},
    }.get(args.mode, False)

    # Call launcher function (initiates connection to appropriate
    # WAMP hub and realm).
    agent, runner = ocs_agent.init_site_agent(args)

    # Create agent instance and register its operations.
    compressor = PTCAgent(agent, args.port, args.ip_address,
                          fake_errors=args.fake_errors)
    agent.register_task('init', compressor.init, startup=init_params)
    agent.register_process('acq', compressor.acq, compressor._stop_acq)

    runner.run(agent, auto_reconnect=True)
def main():
    """Boot the Lakeshore 425 gaussmeter OCS agent and run it."""
    # Start logging
    txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))

    parser = make_parser()
    args = site_config.parse_args(agent_class='Lakeshore425Agent',
                                  parser=parser)

    agent, runner = ocs_agent.init_site_agent(args)

    # Map the startup mode onto 'init_lakeshore' task parameters; any
    # other mode leaves init_params False (task not run at startup).
    init_params = {
        'init': {'auto_acquire': False},
        'acq': {'auto_acquire': True},
    }.get(args.mode, False)

    agent_kwargs = {'port': args.port}
    if args.sampling_frequency is not None:
        agent_kwargs['f_sample'] = float(args.sampling_frequency)
    gaussmeter = LS425Agent(agent, **agent_kwargs)

    agent.register_task('init_lakeshore', gaussmeter.init_lakeshore,
                        startup=init_params)
    for task_name in ('operational_status', 'zero_calibration', 'any_command'):
        agent.register_task(task_name, getattr(gaussmeter, task_name))
    agent.register_process('acq', gaussmeter.acq, gaussmeter._stop_acq)

    runner.run(agent, auto_reconnect=True)
def main(args=None):
    """Parse command-line options and dispatch to a subcommand.

    Parameters
    ----------
    args : list of str, optional
        Command-line arguments; defaults to ``sys.argv[1:]``.
    """
    if args is None:
        args = sys.argv[1:]
    parser = get_parser()

    # Note this call adds a bunch of args to the parser, and parses
    # them, including looking up the site config file and loading
    # defaults from there.
    args = site_config.parse_args(agent_class='*control*', parser=parser,
                                  args=args)

    # Dispatch table instead of an if/elif chain.
    handlers = {
        'scan': scan,
        'shell': shell,
        'listen': listen,
    }
    handler = handlers.get(args.command)
    if handler is None:
        # parser.error() raises SystemExit, so we never fall through.
        parser.error(f"Unknown command '{args.command}'")
    handler(parser, args)
def main():
    """Boot the Vantage Pro 2 weather station OCS agent and run it."""
    parser = make_parser()
    args = site_config.parse_args(agent_class='VantagePro2Agent',
                                  parser=parser)

    # Map the startup mode onto 'init' task parameters; any other mode
    # leaves init_params False (no automatic startup).
    init_params = {
        'init': {'auto_acquire': False},
        'acq': {'auto_acquire': True},
    }.get(args.mode, False)

    # Resolve the device port: an explicit --port wins, then a udev
    # alias at /dev/<serial_number>, then a scan of /dev/serial/by-id.
    device_port = None
    if args.port is not None:
        device_port = args.port
    elif os.path.exists('/dev/{}'.format(args.serial_number)):
        device_port = "/dev/{}".format(args.serial_number)
    elif os.path.exists('/dev/serial/by-id'):
        for candidate in os.listdir('/dev/serial/by-id'):
            if args.serial_number in candidate:
                device_port = "/dev/serial/by-id/{}".format(candidate)
                print("Found port {}".format(device_port))
                break

    if device_port is None:
        print("Could not find device port for {}".format(args.serial_number))
        return

    agent, runner = ocs_agent.init_site_agent(args)

    station = VantagePro2Agent(agent, device_port, args.sample_freq)
    agent.register_task('init', station.init_VantagePro2_task,
                        startup=init_params)
    agent.register_process('acq', station.start_acq, station.stop_acq,
                           blocking=True)
    runner.run(agent, auto_reconnect=True)
            # NOTE(review): fragment — tail of a stop handler (likely
            # _stop_main, registered below); the enclosing 'if' is out
            # of view.
            self._run = False
            return True, 'requested to stop main process'
        else:
            return False, 'main process not currently running'

    def _register_agent(self, session, agent_data):
        # Deprecated entry point kept so older ocs/socs clients do not
        # error out; it only logs a warning and reports success.
        self.log.warn(
            "Warning!!! The register_agent task has been deprecated. Agent '{}' "
            "is using an out of date version of ocs or socs!!".format(
                agent_data['agent_address']))
        return True, "'register_agent' is deprecated"


if __name__ == '__main__':
    # No agent-specific parser is needed for the registry.
    args = site_config.parse_args(agent_class='RegistryAgent', parser=None)

    agent, runner = ocs_agent.init_site_agent(args)
    registry = Registry(agent)

    # The 'main' process runs continuously from startup; the
    # 'register_agent' task is deprecated but still registered for
    # backward compatibility with older clients.
    agent.register_process('main',
                           registry.main,
                           registry._stop_main,
                           blocking=False,
                           startup=True)
    agent.register_task('register_agent',
                        registry._register_agent,
                        blocking=False)

    runner.run(agent, auto_reconnect=True)
def make_parser(parser=None):
    """Build the argument parser for the Agent.

    Allows sphinx to automatically build documentation based on this
    function.

    Parameters
    ----------
    parser : argparse.ArgumentParser, optional
        Parser to add the options to; a new one is created if omitted.

    Returns
    -------
    argparse.ArgumentParser
        Parser with the Agent-specific options added.
    """
    if parser is None:
        parser = argparse.ArgumentParser()

    # Add options specific to this agent.
    pgroup = parser.add_argument_group("Agent Options")
    # Fixed typo in help text: "addres" -> "address".
    pgroup.add_argument('--shm-addr',
                        help='Shelf manager address i.e. [email protected]')
    pgroup.add_argument('--crate-id',
                        help='Crate id used for block_name')
    return parser


if __name__ == '__main__':
    LOG = txaio.make_logger()
    parser = make_parser()
    args = site_config.parse_args(agent_class='CrateAgent', parser=parser)
    startup = True

    agent, runner = ocs_agent.init_site_agent(args)
    shm_addr = args.shm_addr
    crate_id = args.crate_id
    smurfcrate = SmurfCrateMonitor(agent, crate_id, shm_addr)

    # Initialize the crate at startup, then monitor on demand.
    agent.register_task('init_crate', smurfcrate.init_crate,
                        startup=startup)
    agent.register_process('acq', smurfcrate.start_acq,
                           smurfcrate.stop_acq)

    runner.run(agent, auto_reconnect=True)
                        # NOTE(review): fragment — continuation of an
                        # add_argument call whose start is out of view.
                        default=8086,
                        help="InfluxDB port.")
    pgroup.add_argument('--database',
                        default='ocs_feeds',
                        help="Database within InfluxDB to publish data to.")
    pgroup.add_argument('--protocol',
                        default='line',
                        choices=['json', 'line'],
                        help="Protocol for writing data. Either 'line' or "
                             "'json'.")
    # NOTE(review): argparse 'type=bool' treats any non-empty string as
    # True ('--gzip False' is truthy) — confirm intended usage.
    pgroup.add_argument('--gzip',
                        type=bool,
                        default=False,
                        help="Use gzip content encoding to compress requests.")
    return parser


if __name__ == '__main__':
    # Start logging
    txaio.start_logging(level=environ.get("LOGLEVEL", "info"))

    parser = make_parser()
    args = site_config.parse_args(agent_class='InfluxDBAgent', parser=parser)

    agent, runner = ocs_agent.init_site_agent(args)
    influx_agent = InfluxDBAgent(agent, args)

    runner.run(agent, auto_reconnect=True)
            # NOTE(review): fragment — tail of a track-upload coroutine;
            # the enclosing loop/def are out of view. Indentation below
            # (upload inside a loop consuming current_lines) is inferred
            # from the slicing of current_lines — confirm against the
            # full source.
            text = ''.join(upload_lines)
            current_lines = current_lines[group_size:]
            # Wait until the ACU reports enough free stack positions
            # before uploading the next batch of track points.
            free_positions = self.data['status']['summary'][
                'Qty_of_free_program_track_stack_positions']
            while free_positions < 5099:
                yield dsleep(0.1)
                free_positions = self.data['status']['summary'][
                    'Qty_of_free_program_track_stack_positions']
            yield self.acu.http.UploadPtStack(text)
        yield self.acu.stop()
        self.set_job_done('control')
        return True, 'Track generation ended cleanly'


def add_agent_args(parser_in=None):
    # Build (or extend) the argument parser for the ACU agent.
    if parser_in is None:
        parser_in = argparse.ArgumentParser()
    pgroup = parser_in.add_argument_group('Agent Options')
    pgroup.add_argument("--acu_config", default="guess")
    return parser_in


if __name__ == '__main__':
    parser = add_agent_args()
    args = site_config.parse_args(agent_class='ACUAgent', parser=parser)

    agent, runner = ocs_agent.init_site_agent(args)
    acu_agent = ACUAgent(agent, args.acu_config)

    runner.run(agent, auto_reconnect=True)
    # NOTE(review): fragment — these options belong to a make_parser
    # whose 'def' line is out of view.
    pgroup.add_argument('--verbose', '-v', action='count', default=0,
                        help='PID Controller verbosity level.')
    pgroup.add_argument('--mode', type=str, default='iv_acq',
                        choices=['idle', 'init', 'iv_acq'],
                        help="Starting operation for the Agent.")
    return parser


if __name__ == '__main__':
    parser = make_parser()
    args = site_config.parse_args(agent_class='RotationAgent', parser=parser)

    # Map the startup mode onto init-task parameters; 'idle' leaves
    # init_params False (no automatic startup).
    init_params = False
    if args.mode == 'init':
        init_params = {'auto_acquire': False}
    elif args.mode == 'iv_acq':
        init_params = {'auto_acquire': True}

    agent, runner = ocs_agent.init_site_agent(args)
    rotation_agent = RotationAgent(agent,
                                   kikusui_ip=args.kikusui_ip,
                                   kikusui_port=args.kikusui_port,
                                   pid_ip=args.pid_ip,
                                   pid_port=args.pid_port,
                                   pid_verbosity=args.verbose)
    # NOTE(review): fragment — this call continues out of view.
    agent.register_process('iv_acq', rotation_agent.iv_acq,
                        # NOTE(review): fragment — continuation of an
                        # add_argument call whose start is out of view.
                        help="Number of channels to stream from")
    pgroup.add_argument('--sample-rate', default=200, type=float,
                        help="Sample rate for streaming data")
    pgroup.add_argument('--frame-len', default=2, type=float,
                        help="Time per G3 data frame (seconds)")
    return parser


if __name__ == '__main__':
    parser = make_parser()
    args = site_config.parse_args(agent_class='SmurfFileEmulator',
                                  parser=parser)
    txaio.start_logging(level=os.environ.get('LOGLEVEL', 'info'))

    agent, runner = ocs_agent.init_site_agent(args)
    file_em = SmurfFileEmulator(agent, args)

    # On-demand detector-operation emulation tasks plus a continuous
    # 'stream' process.
    agent.register_task('tune_dets', file_em.tune_dets)
    agent.register_task('take_iv', file_em.take_iv)
    agent.register_task('take_bias_steps', file_em.take_bias_steps)
    agent.register_task('bias_dets', file_em.bias_dets)
    agent.register_process('stream', file_em.stream, file_em._stop_stream)

    runner.run(agent, auto_reconnect=True)
        # NOTE(review): fragment — tail of a stop-acquisition handler;
        # the enclosing 'def' is out of view.
        return False, 'acq is not currently running'


def make_parser(parser=None):
    """Build the argument parser for the Agent.

    Allows sphinx to automatically build documentation based on this
    function.
    """
    if parser is None:
        parser = argparse.ArgumentParser()

    pgroup = parser.add_argument_group('Agent Options')
    pgroup.add_argument('--ip_address')
    pgroup.add_argument('--port')
    return parser


if __name__ == '__main__':
    parser = make_parser()
    args = site_config.parse_args(agent_class='PfeifferAgent', parser=parser)

    agent, runner = ocs_agent.init_site_agent(args)
    pfeiffer_agent = PfeifferAgent(agent, args.ip_address, args.port)

    # Acquisition runs continuously from startup; 'close' reuses the
    # stop handler as an on-demand task.
    agent.register_process('acq', pfeiffer_agent.start_acq,
                           pfeiffer_agent.stop_acq, startup=True)
    agent.register_task('close', pfeiffer_agent.stop_acq)

    runner.run(agent, auto_reconnect=True)
    # NOTE(review): fragment — these options belong to a make_parser
    # whose 'def' line is out of view.
    pgroup.add_argument('--threshold', type=float, default=0.1,
                        help='The upper bound on temperature differences '
                             'for stability check')
    pgroup.add_argument('--window', type=float, default=900.,
                        help='The lookback time on temperature differences '
                             'for stability check')
    # NOTE(review): argparse 'type=bool' treats any non-empty string as
    # True ('--auto-acquire False' is truthy) — confirm intended usage.
    pgroup.add_argument('--auto-acquire', type=bool, default=True,
                        help='Automatically start data acquisition on startup')
    return parser


if __name__ == '__main__':
    # Create an argument parser
    parser = make_parser()
    args = site_config.parse_args(
        agent_class='Lakeshore336Agent', parser=parser)

    # Automatically acquire data if requested
    init_params = False
    if args.auto_acquire:
        init_params = {'auto_acquire': True}

    print('I am in charge of device with serial '
          'number: %s' % args.serial_number)

    # Create a session and a runner which communicate over WAMP
    agent, runner = ocs_agent.init_site_agent(args)

    # Pass the new agent session to the agent class
    # NOTE(review): fragment — the boot block continues out of view
    # (task/process registration and runner.run are not visible here).
    lake_agent = LS336_Agent(agent,
                             args.serial_number,
                             args.ip_address,
                             args.f_sample,
                             args.threshold,
                             args.window)
        Within twisted, asynchronous routines that perform blocking
        operations (without blocking the reactor thread) will return a
        Deferred object, immediately, instead of the result of the
        request you have made.  To suspend the function until the actual
        result is ready, you should:

        - decorate the function with @inlineCallbacks.
        - use the idiom "x = yield Function(...)" idiom.

        This latter idiom is used on the dsleep function; not because we
        care what the function returned but because we definitely want
        to wait until that function has completed before proceeding with
        the next step in our operation.
        """
        # NOTE(review): fragment — the enclosing 'def' (likely task2,
        # registered below) is out of view.
        for step in range(5):
            session.add_message('task2-nonblocking step %i' % step)
            yield dsleep(1)
        return True, 'task-2 nonblocking complete.'


if __name__ == '__main__':
    args = site_config.parse_args(agent_class='*host*')
    agent, runner = ocs_agent.init_site_agent(args)

    my_hd = MyHardwareDevice()
    # task1 is a blocking task; task2 runs non-blocking in the reactor.
    agent.register_task('task1', my_hd.task1)
    agent.register_task('task2', my_hd.task2, blocking=False)

    runner.run(agent, auto_reconnect=True)
                        # NOTE(review): fragment — continuation of an
                        # add_argument call whose start is out of view.
                        help="Number of failed copy attempts before the agent "
                             "will stop trying to copy a file")
    pgroup.add_argument(
        '--copy-timeout', type=float,
        help="Time (sec) before the rsync command will timeout")
    pgroup.add_argument('--cmd-timeout', type=float,
                        help="Time (sec) before remote commands will timeout")
    pgroup.add_argument('--files-per-batch', type=int,
                        help="Number of files to copy over per batch. Default "
                             "is None, which will copy over all available files.")
    pgroup.add_argument('--sleep-time', type=float, default=60,
                        help="Time to sleep (sec) in between copy iterations")
    return parser


if __name__ == '__main__':
    parser = make_parser()
    args = site_config.parse_args('SupRsync', parser=parser)
    txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))

    agent, runner = ocs_agent.init_site_agent(args)
    suprsync = SupRsync(agent, args)

    # The copy loop runs continuously from startup.
    agent.register_process('run', suprsync.run, suprsync._stop, startup=True)

    runner.run(agent, auto_reconnect=True)
    # NOTE(review): fragment — tail of a make_parser whose 'def' line
    # is out of view.
    return parser


if __name__ == '__main__':
    # For logging
    txaio.use_twisted()
    LOG = txaio.make_logger()

    # Start logging
    txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))

    parser = make_parser()

    # Interpret options in the context of site_config.
    args = site_config.parse_args(agent_class='FTSAerotechAgent',
                                  parser=parser)

    agent, runner = ocs_agent.init_site_agent(args)

    fts_agent = FTSAerotechAgent(agent, args.ip_address, args.port,
                                 args.mode, args.sampling_frequency)

    # On-demand stage-motion tasks plus a continuous acquisition process.
    agent.register_task('init_stage', fts_agent.init_stage_task)
    agent.register_task('move_to', fts_agent.move_to)
    agent.register_task('home', fts_agent.home_task)
    agent.register_process('acq', fts_agent.start_acq, fts_agent.stop_acq)

    runner.run(agent, auto_reconnect=True)
    # NOTE(review): fragment — this option belongs to a make_parser
    # whose 'def' line is out of view. ('willl' typo in the help text
    # below is a runtime string; fix upstream if desired.)
    pgroup.add_argument('--target-rate', default=10, type=float,
                        help="Target rate for monitored readout channels in "
                             "Hz. This willl be the rate that detector data is "
                             "streamed to an OCS feed")
    return parser


if __name__ == "__main__":
    # Start logging
    txaio.start_logging(level=environ.get("LOGLEVEL", "info"))

    parser = make_parser()
    args = site_config.parse_args(agent_class='SmurfRecorder',
                                  parser=parser)

    agent, runner = ocs_agent.init_site_agent(args)
    listener = SmurfRecorder(agent,
                             int(args.time_per_file),
                             args.data_dir,
                             args.stream_id,
                             address=args.address,
                             port=int(args.port),
                             target_rate=args.target_rate)

    agent.register_process("record",
                           listener.start_record,
                           listener.stop_record,
                           startup=bool(args.auto_start))
    # NOTE(review): fragment — this call continues out of view.
    agent.register_task('set_monitored_channels',
                        # NOTE(review): fragment — continuation of an
                        # add_argument call whose start is out of view.
                        nargs='+', type=int, default=[],
                        help="Readout channels to start monitoring on startup")
    pgroup.add_argument('--monitored-channel-rate', type=float, default=10,
                        help="Target sample rate for monitored channels")
    return parser


if __name__ == '__main__':
    txaio.use_twisted()
    txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))

    parser = make_parser()
    args = site_config.parse_args(agent_class='MagpieAgent', parser=parser)

    agent, runner = ocs_agent.init_site_agent(args)
    magpie = MagpieAgent(agent, args)

    # With fake data the 'read' process is not auto-started; otherwise
    # it starts immediately, reading from the configured source.
    if args.fake_data:
        read_startup = False
    else:
        read_startup = {'src': args.src}

    agent.register_process('read', magpie.read, magpie._stop_read,
                           startup=read_startup)
    # NOTE(review): fragment — this call continues out of view.
    agent.register_process('stream_fake_data', magpie.stream_fake_data,
    # NOTE(review): fragment — acq_txt is built up from an out-of-view
    # earlier assignment inside a make_parser-style function.
    acq_txt += ', **acq_reg**: read out custom configured registers'
    acq_txt += ', or **idle**: leave device idle at startup.'
    # NOTE(review): choices contains 'idel' (typo) while the help text
    # says 'idle' — passing '--mode idle' would be rejected by argparse.
    # Fix the choices list upstream.
    pgroup.add_argument('--mode', default='acq',
                        choices=['idel', 'acq', 'acq_reg'],
                        help=acq_txt)
    return parser


if __name__ == '__main__':
    # Start logging
    txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))

    parser = make_parser()
    args = site_config.parse_args(agent_class='LabJackAgent', parser=parser)

    # Map the startup mode onto init-task parameters; other modes leave
    # init_params False (no automatic startup).
    init_params = False
    if args.mode == 'acq':
        init_params = {'auto_acquire': True}
    if args.mode == 'acq_reg':
        init_params = {'auto_acquire_reg': True}

    ip_address = str(args.ip_address)
    active_channels = args.active_channels
    function_file = str(args.function_file)
    sampling_frequency = float(args.sampling_frequency)

    agent, runner = ocs_agent.init_site_agent(args)

    # NOTE(review): fragment — this call continues out of view.
    sensors = LabJackAgent(agent,
""" Stops the data acquisiton. """ if self.take_data: self.take_data = False return True, 'requested to stop taking data.' return False, 'acq is not currently running.' def make_parser(parser=None): if parser is None: parser = argparse.ArgumentParser() # Add options specific to this agent. pgroup = parser.add_argument_group('Agent Options') pgroup.add_argument('--port', type=int, default=8080) return parser # Portion of the code that runs if __name__ == '__main__': parser = make_parser() args = site_config.parse_args(agent_class='HWPBBBAgent', parser=parser) agent, runner = ocs_agent.init_site_agent(args) hwp_bbb_agent = HWPBBBAgent(agent, port=args.port) agent.register_process('acq', hwp_bbb_agent.start_acq, hwp_bbb_agent.stop_acq, startup=True) runner.run(agent, auto_reconnect=True)
def main(): # Start logging txaio.start_logging(level=os.environ.get("LOGLEVEL", "info")) parser = make_parser() #Not used anymore, but we don't it to break the agent if these args are passed parser.add_argument('--fake-data', help=argparse.SUPPRESS) parser.add_argument('--num-channels', help=argparse.SUPPRESS) # Interpret options in the context of site_config. args = site_config.parse_args(agent_class='Lakeshore240Agent', parser=parser) if args.fake_data is not None: warnings.warn( "WARNING: the --fake-data parameter is deprecated, please " "remove from your site-config file", DeprecationWarning) if args.num_channels is not None: warnings.warn( "WARNING: the --num-channels parameter is deprecated, please " "remove from your site-config file", DeprecationWarning) # Automatically acquire data if requested (default) init_params = False if args.mode == 'init': init_params = {'auto_acquire': False} elif args.mode == 'acq': init_params = {'auto_acquire': True} device_port = None if args.port is not None: device_port = args.port else: # Tries to find correct USB port automatically # This exists if udev rules are setup properly for the 240s if os.path.exists('/dev/{}'.format(args.serial_number)): device_port = "/dev/{}".format(args.serial_number) elif os.path.exists('/dev/serial/by-id'): ports = os.listdir('/dev/serial/by-id') for port in ports: if args.serial_number in port: device_port = "/dev/serial/by-id/{}".format(port) print("Found port {}".format(device_port)) break if device_port is None: print("Could not find device port for {}".format(args.serial_number)) return agent, runner = ocs_agent.init_site_agent(args) kwargs = {'port': device_port} if args.sampling_frequency is not None: kwargs['f_sample'] = float(args.sampling_frequency) therm = LS240_Agent(agent, **kwargs) agent.register_task('init_lakeshore', therm.init_lakeshore, startup=init_params) agent.register_task('set_values', therm.set_values) agent.register_task('upload_cal_curve', therm.upload_cal_curve) 
agent.register_process('acq', therm.acq, therm._stop_acq) runner.run(agent, auto_reconnect=True)
    # NOTE(review): fragment — tail of a make_parser whose 'def' line
    # is out of view.
    pgroup.add_argument('--sampling_frequency')
    return parser


if __name__ == '__main__':
    # For logging
    txaio.use_twisted()
    LOG = txaio.make_logger()

    # Start logging
    txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))

    parser = make_parser()

    # Interpret options in the context of site_config.
    args = site_config.parse_args(agent_class='LATRtXYStageAgent',
                                  parser=parser)

    agent, runner = ocs_agent.init_site_agent(args)

    xy_agent = LATRtXYStageAgent(agent, args.ip_address, args.port,
                                 args.mode, args.sampling_frequency)

    # On-demand motion tasks plus a continuous acquisition process.
    agent.register_task('init_xy_stage', xy_agent.init_xy_stage_task)
    agent.register_task('move_x_cm', xy_agent.move_x_cm)
    agent.register_task('move_y_cm', xy_agent.move_y_cm)
    agent.register_task('set_position', xy_agent.set_position)
    agent.register_process('acq', xy_agent.start_acq, xy_agent.stop_acq)

    runner.run(agent, auto_reconnect=True)
""" if parser is None: parser = argparse.ArgumentParser() # Add options specific to this agent. pgroup = parser.add_argument_group('Agent Options') pgroup.add_argument('--ip-address') pgroup.add_argument('--gpib-slot') return parser if __name__ == '__main__': parser = make_parser() args = site_config.parse_args(agent_class='ScpiPsuAgent', parser=parser) agent, runner = ocs_agent.init_site_agent(args) p = ScpiPsuAgent(agent, args.ip_address, int(args.gpib_slot)) agent.register_task('init', p.init_psu) agent.register_task('set_voltage', p.set_voltage) agent.register_task('set_current', p.set_current) agent.register_task('set_output', p.set_output) agent.register_process('monitor_output', p.monitor_output, p.stop_monitoring) runner.run(agent, auto_reconnect=True)
""" if parser is None: parser = argparse.ArgumentParser() # Add options specific to this agent. pgroup = parser.add_argument_group('Agent Options') pgroup.add_argument('--log-directory') return parser if __name__ == '__main__': # Start logging txaio.start_logging(level=os.environ.get("LOGLEVEL", "info")) # Setup argument parser parser = make_parser() args = site_config.parse_args(agent_class='BlueforsAgent', parser=parser) LOG.info('I am following logs located at : %s' % args.log_directory) agent, runner = ocs_agent.init_site_agent(args) bluefors_agent = BlueforsAgent(agent, args.log_directory) agent.register_process('acq', bluefors_agent.acq, bluefors_agent._stop_acq, startup=True) runner.run(agent, auto_reconnect=True)
"Agent startup.") pgroup.add_argument("--address", help="Address to listen to.") pgroup.add_argument("--port", default=161, help="Port to listen on.") pgroup.add_argument("--snmp-version", default='3', choices=['1', '2', '3'], help="SNMP version for communication. Must match " + "configuration on the M1000.") return parser if __name__ == "__main__": # Start logging txaio.start_logging(level=environ.get("LOGLEVEL", "info")) parser = make_parser() args = site_config.parse_args(agent_class="MeinbergM1000Agent", parser=parser) agent, runner = ocs_agent.init_site_agent(args) listener = MeinbergM1000Agent(agent, address=args.address, port=int(args.port), version=int(args.snmp_version)) agent.register_process("acq", listener.acq, listener._stop_acq, startup=bool(args.auto_start), blocking=False) runner.run(agent, auto_reconnect=True)
    # NOTE(review): fragment — body of a make_parser whose 'def' line
    # is out of view.
    if parser is None:
        parser = argparse.ArgumentParser()

    pgroup = parser.add_argument_group('Agent Options')
    pgroup.add_argument('--data-dir', required=True,
                        help="Base directory to store data. "
                             "Subdirectories will be made here.")
    # NOTE(review): the help string below is missing a space between
    # "either" and "idle" — runtime string, fix upstream if desired.
    pgroup.add_argument('--initial-state',
                        default='idle', choices=['idle', 'record'],
                        help="Initial state of argument parser. Can be either"
                             "idle or record")
    pgroup.add_argument('--time-per-file', default='3600',
                        help="Time per file in seconds. Defaults to 1 hr")
    return parser


if __name__ == '__main__':
    # Start logging
    txaio.start_logging(level=environ.get("LOGLEVEL", "info"))

    parser = make_parser()
    args = site_config.parse_args(agent_class='AggregatorAgent', parser=parser)

    agent, runner = ocs_agent.init_site_agent(args)
    data_aggregator = AggregatorAgent(agent, args)

    runner.run(agent, auto_reconnect=True)
pgroup.add_argument("--num-chans", default=528, help="Number of detector channels to simulate.") pgroup.add_argument("--stream-id", default="stream_sim", help="Stream ID for the simulator.") return parser if __name__ == '__main__': # Start logging txaio.start_logging(level=environ.get("LOGLEVEL", "info")) parser = make_parser() args = site_config.parse_args(agent_class='SmurfStreamSimulator', parser=parser) agent, runner = ocs_agent.init_site_agent(args) sim = SmurfStreamSimulator(agent, target_host=args.target_host, port=int(args.port), num_chans=int(args.num_chans), stream_id=args.stream_id) agent.register_process('stream', sim.start_background_streamer, sim.stop_background_streamer, startup=bool(args.auto_start)) agent.register_task('start', sim.set_stream_on) agent.register_task('stop', sim.set_stream_off)
        # NOTE(review): fragment — continuation of an add_argument call
        # whose start is out of view.
        help='Frequency at which to produce data.')
    pgroup.add_argument(
        '--frame-length', default=60, type=int,
        help='Frame length to pass to the aggregator parameters.')
    return parser_in


if __name__ == '__main__':
    # Start logging
    txaio.start_logging(level=environ.get("LOGLEVEL", "info"))

    parser = add_agent_args()
    args = site_config.parse_args(agent_class='FakeDataAgent', parser=parser)

    # Auto-start acquisition only in 'acq' mode.
    startup = False
    if args.mode == 'acq':
        startup = True

    agent, runner = ocs_agent.init_site_agent(args)

    fdata = FakeDataAgent(agent,
                          num_channels=args.num_channels,
                          sample_rate=args.sample_rate,
                          frame_length=args.frame_length)
    # NOTE(review): fragment — this call continues out of view.
    agent.register_process('acq', fdata.acq, fdata._stop_acq,
                           blocking=True,
    # NOTE(review): fragment — body of a make_parser whose 'def' line
    # is out of view.
    if parser is None:
        parser = argparse.ArgumentParser()

    pgroup = parser.add_argument_group('Agent Options')
    # NOTE(review): "upd-publisher" below looks like a typo for
    # "udp-publisher" — runtime string, fix upstream if desired.
    pgroup.add_argument('--udp-port', type=int,
                        help="Port for upd-publisher")
    # NOTE(review): argparse 'type=bool' treats any non-empty string as
    # True — confirm intended usage.
    pgroup.add_argument('--create-table', type=bool,
                        help="Specifies whether agent should create or update "
                             "pysmurf_files table if non exists.",
                        default=True)
    pgroup.add_argument('--db-path', type=str,
                        default='/data/so/databases/suprsync.db',
                        help="Path to suprsync sqlite database")
    pgroup.add_argument('--echo-sql', action='store_true')
    return parser


if __name__ == '__main__':
    parser = make_parser()
    args = site_config.parse_args(agent_class='PysmurfMonitor', parser=parser)

    agent, runner = ocs_agent.init_site_agent(args)
    monitor = PysmurfMonitor(agent, args)

    # The monitor loop runs continuously from startup.
    agent.register_process('run', monitor.run, monitor._stop, startup=True)

    # Receive pysmurf UDP publisher packets on the configured port.
    reactor.listenUDP(args.udp_port, monitor)

    runner.run(agent, auto_reconnect=True)
    build documentation based on this function.
    """
    # NOTE(review): fragment — the lines above are the tail of a
    # docstring for a make_parser whose 'def' line is out of view.
    if parser is None:
        parser = argparse.ArgumentParser()

    # Add options specific to this agent.
    pgroup = parser.add_argument_group('Agent Options')
    pgroup.add_argument('--ip-address')
    pgroup.add_argument('--username')
    pgroup.add_argument('--password')
    return parser


if __name__ == '__main__':
    parser = make_parser()
    args = site_config.parse_args(agent_class='SynAccAgent', parser=parser)

    agent, runner = ocs_agent.init_site_agent(args)
    p = SynaccessAgent(agent,
                       ip_address=args.ip_address,
                       username=args.username,
                       password=args.password)

    # get_status runs once at startup; the rest are on-demand tasks.
    agent.register_task('get_status', p.get_status, startup={})
    agent.register_task('reboot', p.reboot)
    agent.register_task('set_outlet', p.set_outlet)
    agent.register_task('set_all', p.set_all)

    runner.run(agent, auto_reconnect=True)