def main():
    """Launch the ARTIQ Browser GUI application."""
    cli_args = get_argparser().parse_args()

    # Fall back to the per-user default state file when none was given.
    if cli_args.db_file is None:
        cli_args.db_file = os.path.join(get_user_config_dir(),
                                        "artiq_browser.pyon")

    gui_log_handler = log.init_log(cli_args, "browser")

    # Qt application wrapped in an asyncio-compatible event loop so Qt
    # signals and coroutines run on the same loop.
    qt_app = QtWidgets.QApplication(["ARTIQ Browser"])
    event_loop = QEventLoop(qt_app)
    asyncio.set_event_loop(event_loop)
    atexit.register(event_loop.close)

    dataset_models = models.LocalModelManager(datasets.Model)
    dataset_models.init({})

    state_mgr = state.StateManager(cli_args.db_file)
    browser_win = Browser(state_mgr, dataset_models, cli_args.browse_root,
                          cli_args.server, cli_args.port)
    gui_log_handler.callback = browser_win.log.append_message

    if os.name == "nt":
        # Show the window before applets are created; otherwise applets in
        # detached QDockWidgets fail to embed on Windows.
        browser_win.show()

    state_mgr.load()
    state_mgr.start()
    atexit_register_coroutine(state_mgr.stop)

    if cli_args.select is not None:
        browser_win.files.select(cli_args.select)

    browser_win.show()
    event_loop.run_until_complete(browser_win.exit_request.wait())
def start(self):
    """ Start the server """
    self.executor = ThreadPoolExecutor(max_workers=2)
    event_loop = asyncio.get_event_loop()
    self.loop = event_loop
    atexit.register(event_loop.close)

    bind_addr = bind_address_from_args(self.args)

    # Control (RPC) endpoint.
    event_loop.run_until_complete(
        self.server_control.start(bind_addr, self.args.port_control))
    atexit_register_coroutine(self.server_control.stop)

    # Notify (publisher) endpoint.
    event_loop.run_until_complete(
        self.server_notify.start(bind_addr, self.args.port_notify))
    atexit_register_coroutine(self.server_notify.stop)

    # Background tasks: one measurement task, one lock task per laser.
    asyncio.ensure_future(self.measurement_task())
    for laser_name in self.lasers:
        asyncio.ensure_future(self.lock_task(laser_name))

    # Configuration backups: once immediately, periodically while the
    # server runs, and once more at interpreter exit.
    backup_config(self.args, "_server")
    asyncio.ensure_future(regular_config_backup(self.args, "_server"))
    atexit.register(backup_config, self.args, "_server")

    logger.info("server started")
    self.running = True
    event_loop.run_forever()
def main():
    """Entry point for the InfluxDB log-filter bridge.

    Starts the database writer, exposes the filter over a terminable RPC
    server, forwards entries from the master, and runs until a signal or
    an RPC terminate request arrives.
    """
    args = get_argparser().parse_args()
    common_args.init_logger_from_args(args)

    # Dedicated event loop, closed at interpreter exit. atexit runs LIFO,
    # so the coroutine cleanups registered below run before loop.close.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    atexit.register(loop.close)

    signal_handler = SignalHandler()
    signal_handler.setup()
    atexit.register(signal_handler.teardown)

    writer = DBWriter(args.baseurl_db, args.user_db, args.password_db,
                      args.database, args.table)
    writer.start()
    atexit_register_coroutine(writer.stop)

    # Renamed from "filter" to avoid shadowing the builtin.
    influx_filter = Filter(args.pattern_file)
    rpc_server = Server({"influxdb_filter": influx_filter},
                        builtin_terminate=True)
    loop.run_until_complete(rpc_server.start(
        common_args.bind_address_from_args(args), args.port_control))
    atexit_register_coroutine(rpc_server.stop)

    reader = MasterReader(args.server_master, args.port_master,
                          args.retry_master, influx_filter._filter, writer)
    reader.start()
    atexit_register_coroutine(reader.stop)

    # Wrap the coroutines in Tasks explicitly: passing bare coroutines to
    # asyncio.wait() was deprecated in 3.8 and removed in Python 3.11.
    _, pending = loop.run_until_complete(asyncio.wait(
        [loop.create_task(signal_handler.wait_terminate()),
         loop.create_task(rpc_server.wait_terminate())],
        return_when=asyncio.FIRST_COMPLETED))
    for task in pending:
        task.cancel()
def main():
    """Entry point for the schedule-logger bridge.

    Starts the database writer, exposes a terminable RPC target, and
    forwards schedule updates from the master into the database until a
    terminate request arrives over RPC.
    """
    args = get_argparser().parse_args()
    common_args.init_logger_from_args(args)

    # Create and install a fresh event loop explicitly rather than relying
    # on the deprecated implicit asyncio.get_event_loop() behaviour; this
    # also matches the other entry points in this codebase.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    atexit.register(loop.close)

    writer = DBWriter(args.baseurl_db, args.user_db, args.password_db,
                      args.database, args.table)
    writer.start()
    atexit_register_coroutine(writer.stop)

    # Renamed from "log"/"server" for clarity and to avoid shadowing any
    # module-level "log" name.
    sched_log = Log(writer)
    logger_target = Logger()

    rpc_server = Server({"schedule_logger": logger_target},
                        builtin_terminate=True)
    loop.run_until_complete(
        rpc_server.start(common_args.bind_address_from_args(args),
                         args.port_control))
    atexit_register_coroutine(rpc_server.stop)

    reader = MasterReader(args.server_master, args.port_master,
                          args.retry_master, sched_log)
    reader.start()
    atexit_register_coroutine(reader.stop)

    # Block until a terminate request arrives over RPC.
    loop.run_until_complete(rpc_server.wait_terminate())
def main():
    """Entry point for the controller manager (ctlmgr).

    Configures console logging plus log forwarding to the master, starts
    the controller manager and a small RPC server, then waits for either
    a termination signal or an RPC terminate request.
    """
    args = get_argparser().parse_args()

    # Root logger passes everything; per-handler filtering is done by
    # SourceFilter, whose threshold derives from the -q/-v counts. The
    # filter also tags records with the "source" field used below.
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.NOTSET)
    source_adder = SourceFilter(logging.WARNING +
                                args.quiet*10 - args.verbose*10,
                                "ctlmgr({})".format(platform.node()))
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(logging.Formatter(
        "%(levelname)s:%(source)s:%(name)s:%(message)s"))
    console_handler.addFilter(source_adder)
    root_logger.addHandler(console_handler)

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    atexit.register(loop.close)

    signal_handler = SignalHandler()
    signal_handler.setup()
    atexit.register(signal_handler.teardown)

    # Forward log records to the master in addition to the console.
    logfwd = LogForwarder(args.server, args.port_logging,
                          args.retry_master)
    logfwd.addFilter(source_adder)
    root_logger.addHandler(logfwd)
    logfwd.start()
    atexit_register_coroutine(logfwd.stop)

    ctlmgr = ControllerManager(args.server, args.port_notify,
                               args.retry_master, args.host_filter)
    ctlmgr.start()
    atexit_register_coroutine(ctlmgr.stop)

    # Minimal RPC facade: only exposes retry_now.
    class CtlMgrRPC:
        retry_now = ctlmgr.retry_now

    rpc_target = CtlMgrRPC()
    rpc_server = Server({"ctlmgr": rpc_target}, builtin_terminate=True)
    loop.run_until_complete(rpc_server.start(
        common_args.bind_address_from_args(args), args.port_control))
    atexit_register_coroutine(rpc_server.stop)

    # Wrap the coroutines in Tasks explicitly: passing bare coroutines to
    # asyncio.wait() was deprecated in 3.8 and removed in Python 3.11.
    _, pending = loop.run_until_complete(asyncio.wait(
        [loop.create_task(signal_handler.wait_terminate()),
         loop.create_task(rpc_server.wait_terminate())],
        return_when=asyncio.FIRST_COMPLETED))
    for task in pending:
        task.cancel()
def main():
    """Entry point for the controller manager (legacy variant).

    Sets up console logging plus log forwarding to the master, starts the
    controller manager and an RPC server, and runs until a terminate
    request arrives over RPC.
    """
    args = get_argparser().parse_args()

    # Root logger passes everything; filtering happens per-handler via
    # SourceFilter (threshold derived from the -q/-v counts, which also
    # tags records with the "source" field used by the formatter below).
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.NOTSET)
    source_adder = SourceFilter(logging.WARNING +
                                args.quiet*10 - args.verbose*10,
                                "ctlmgr({})".format(platform.node()))
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(logging.Formatter(
        "%(levelname)s:%(source)s:%(name)s:%(message)s"))
    console_handler.addFilter(source_adder)
    root_logger.addHandler(console_handler)

    # NOTE(review): the explicit ProactorEventLoop branch predates Python
    # 3.8 (where Proactor became the Windows default) -- confirm the
    # targeted Python version before modernizing.
    if os.name == "nt":
        loop = asyncio.ProactorEventLoop()
        asyncio.set_event_loop(loop)
    else:
        loop = asyncio.get_event_loop()
    atexit.register(loop.close)

    # Forward log records to the master in addition to the console.
    logfwd = LogForwarder(args.server, args.port_logging,
                          args.retry_master)
    logfwd.addFilter(source_adder)
    root_logger.addHandler(logfwd)
    logfwd.start()
    atexit_register_coroutine(logfwd.stop)

    ctlmgr = ControllerManager(args.server, args.port_notify,
                               args.retry_master)
    ctlmgr.start()
    atexit_register_coroutine(ctlmgr.stop)

    # Minimal RPC facade: only exposes retry_now.
    class CtlMgrRPC:
        retry_now = ctlmgr.retry_now

    rpc_target = CtlMgrRPC()
    rpc_server = Server({"ctlmgr": rpc_target}, builtin_terminate=True)
    loop.run_until_complete(rpc_server.start(
        common_args.bind_address_from_args(args), args.port_control))
    atexit_register_coroutine(rpc_server.stop)

    # Block until a terminate request arrives over RPC.
    loop.run_until_complete(rpc_server.wait_terminate())
def main():
    """Entry point for the ARTIQ master.

    Brings up, in order: the broadcast server, device/dataset databases,
    the experiment repository, the scheduler, and the control/notify/
    logging network servers, then runs the event loop forever. Cleanups
    are registered with atexit (LIFO), so services stop in reverse order
    of creation and the loop is closed last.
    """
    args = get_argparser().parse_args()
    log_forwarder = init_log(args)
    # NOTE(review): explicit ProactorEventLoop predates Python 3.8, where
    # it became the Windows default -- confirm targeted version.
    if os.name == "nt":
        loop = asyncio.ProactorEventLoop()
        asyncio.set_event_loop(loop)
    else:
        loop = asyncio.get_event_loop()
    atexit.register(loop.close)
    bind = common_args.bind_address_from_args(args)

    # Broadcast server relays "log" and "ccb" messages to dashboards.
    server_broadcast = Broadcaster()
    loop.run_until_complete(server_broadcast.start(
        bind, args.port_broadcast))
    atexit_register_coroutine(server_broadcast.stop)

    # Every forwarded log record is rebroadcast on the "log" channel.
    log_forwarder.callback = (lambda msg:
                              server_broadcast.broadcast("log", msg))

    def ccb_issue(service, *args, **kwargs):
        # Relay a client control broadcast (CCB) request to subscribers.
        msg = {
            "service": service,
            "args": args,
            "kwargs": kwargs
        }
        server_broadcast.broadcast("ccb", msg)

    device_db = DeviceDB(args.device_db)
    dataset_db = DatasetDB(args.dataset_db)
    dataset_db.start()
    atexit_register_coroutine(dataset_db.stop)
    # Populated below, after the scheduler exists; ExperimentDB keeps a
    # reference to this dict, so later updates are visible to it.
    worker_handlers = dict()

    if args.git:
        repo_backend = GitBackend(args.repository)
    else:
        repo_backend = FilesystemBackend(args.repository)
    experiment_db = ExperimentDB(
        repo_backend, worker_handlers, args.experiment_subdir)
    atexit.register(experiment_db.close)

    scheduler = Scheduler(RIDCounter(), worker_handlers, experiment_db)
    scheduler.start()
    atexit_register_coroutine(scheduler.stop)

    config = MasterConfig(args.name)
    # RPC entry points available to experiment worker processes.
    worker_handlers.update({
        "get_device_db": device_db.get_device_db,
        "get_device": device_db.get,
        "get_dataset": dataset_db.get,
        "update_dataset": dataset_db.update,
        "scheduler_submit": scheduler.submit,
        "scheduler_delete": scheduler.delete,
        "scheduler_request_termination": scheduler.request_termination,
        "scheduler_get_status": scheduler.get_status,
        "scheduler_check_pause": scheduler.check_pause,
        "ccb_issue": ccb_issue,
    })
    experiment_db.scan_repository_async()

    # Control server: synchronous RPC targets used by clients.
    server_control = RPCServer({
        "master_config": config,
        "master_device_db": device_db,
        "master_dataset_db": dataset_db,
        "master_schedule": scheduler,
        "master_experiment_db": experiment_db
    }, allow_parallel=True)
    loop.run_until_complete(server_control.start(
        bind, args.port_control))
    atexit_register_coroutine(server_control.stop)

    # Notify server: publishes change notifications for these notifiers.
    server_notify = Publisher({
        "schedule": scheduler.notifier,
        "devices": device_db.data,
        "datasets": dataset_db.data,
        "explist": experiment_db.explist,
        "explist_status": experiment_db.status
    })
    loop.run_until_complete(server_notify.start(
        bind, args.port_notify))
    atexit_register_coroutine(server_notify.stop)

    # Logging server: receives log records from remote components.
    server_logging = LoggingServer()
    loop.run_until_complete(server_logging.start(
        bind, args.port_logging))
    atexit_register_coroutine(server_logging.stop)

    print("ARTIQ master is now ready.")
    loop.run_forever()
def start(self):
    """Connect to the WaND servers and run the GUI event loop.

    For each configured server, subscribes to its "laser_db", "freq_db"
    and "osa_db" notifiers, with automatic reconnection on disconnect.
    Blocks in run_until_complete until the window's exit_request is set.
    """
    def init_cb(db, mod):
        # Initial sync callback: merge the server's current state into
        # our local dict and hand that dict back to the subscriber.
        db.update(mod)
        return db

    async def subscriber_reconnect(self, server, db):
        # (Re)connect the subscriber for database `db` on `server`,
        # retrying every 10 s until connected or the GUI is exiting.
        logger.info("No connection to server '{}'".format(server))

        # Blank out displays bound to the unreachable server and wake
        # their update loops so they repaint.
        for _, display in self.laser_displays.items():
            if display.server == server:
                display.server = ""
                display.wake_loop.set()

        server_cfg = self.config["servers"][server]
        subscriber = self.subscribers[server][db]["subscriber"]

        if self.win.exit_request.is_set():
            return

        def make_fut(self, server, db):
            # Disconnect callback: mark the entry disconnected and
            # schedule a fresh reconnect attempt.
            fut = asyncio.ensure_future(
                subscriber_reconnect(self, server, db))
            self.subscribers[server][db]["connected"] = False
            self.subscribers[server][db]["future"] = fut

        subscriber.disconnect_cb = functools.partial(
            make_fut, self, server, db)

        while not self.win.exit_request.is_set():
            try:
                await subscriber.connect(server_cfg["host"],
                                         server_cfg["notify"])
                logger.info("Reconnected to server '{}'".format(server))
                break
            except OSError:
                logger.info("could not connect to '{}' retry in 10s..."
                            .format(server))
                await asyncio.sleep(10)

    # The reconnect coroutines scheduled below only run once the event
    # loop turns, by which point the subscriber dict entries exist.
    for server, server_cfg in self.config["servers"].items():
        self.subscribers[server] = {}

        # ask the servers to keep us updated with changes to laser settings
        # (exposures, references, etc)
        subscriber = Subscriber(
            "laser_db",
            functools.partial(init_cb, self.laser_db),
            functools.partial(self.notifier_cb, "laser_db", server))
        fut = asyncio.ensure_future(
            subscriber_reconnect(self, server, "laser_db"))
        self.subscribers[server]["laser_db"] = {
            "subscriber": subscriber,
            "connected": False,
            "future": fut
        }

        # ask the servers to keep us updated with the latest frequency data
        subscriber = Subscriber(
            "freq_db",
            functools.partial(init_cb, self.freq_db),
            functools.partial(self.notifier_cb, "freq_db", server))
        fut = asyncio.ensure_future(
            subscriber_reconnect(self, server, "freq_db"))
        self.subscribers[server]["freq_db"] = {
            "subscriber": subscriber,
            "connected": False,
            "future": fut
        }

        # ask the servers to keep us updated with the latest osa traces
        subscriber = Subscriber(
            "osa_db",
            functools.partial(init_cb, self.osa_db),
            functools.partial(self.notifier_cb, "osa_db", server))
        fut = asyncio.ensure_future(
            subscriber_reconnect(self, server, "osa_db"))
        self.subscribers[server]["osa_db"] = {
            "subscriber": subscriber,
            "connected": False,
            "future": fut
        }

    atexit_register_coroutine(self.shutdown)
    self.win.showMaximized()
    # Ensure the wait below unblocks even on interpreter shutdown.
    atexit.register(self.win.exit_request.set)
    self.loop.run_until_complete(self.win.exit_request.wait())
def main():
    """Entry point for the ARTIQ dashboard.

    Connects RPC/subscriber/broadcast clients to the master, builds the
    main window with its docks, restores saved state, and runs the Qt+
    asyncio event loop until the window requests exit.
    """
    # initialize application
    args = get_argparser().parse_args()
    widget_log_handler = log.init_log(args, "dashboard")

    # load any plugin modules first (to register argument_ui classes, etc.)
    if args.plugin_modules:
        for mod in args.plugin_modules:
            importlib.import_module(mod)

    # Default state file is unique per master server/port.
    if args.db_file is None:
        args.db_file = os.path.join(
            get_user_config_dir(),
            "artiq_dashboard_{server}_{port}.pyon".format(
                server=args.server.replace(":", "."),
                port=args.port_notify))

    app = QtWidgets.QApplication(["ARTIQ Dashboard"])
    loop = QEventLoop(app)
    asyncio.set_event_loop(loop)
    atexit.register(loop.close)
    smgr = state.StateManager(args.db_file)

    # create connections to master
    rpc_clients = dict()
    for target in "schedule", "experiment_db", "dataset_db", "device_db":
        client = AsyncioClient()
        loop.run_until_complete(client.connect_rpc(
            args.server, args.port_control, "master_" + target))
        atexit.register(client.close_rpc)
        rpc_clients[target] = client

    # One-shot synchronous query for the master's configured name.
    config = Client(args.server, args.port_control, "master_config")
    try:
        server_name = config.get_name()
    finally:
        config.close_rpc()

    disconnect_reported = False

    def report_disconnect():
        # Log the loss of the master connection only once.
        nonlocal disconnect_reported
        if not disconnect_reported:
            logging.error("connection to master lost, "
                          "restart dashboard to reconnect")
        disconnect_reported = True

    # Subscriber clients mirror the master's notifiers into local models.
    sub_clients = dict()
    for notifier_name, modelf in (("explist", explorer.Model),
                                  ("explist_status", explorer.StatusUpdater),
                                  ("datasets", datasets.Model),
                                  ("schedule", schedule.Model)):
        subscriber = ModelSubscriber(notifier_name, modelf,
                                     report_disconnect)
        loop.run_until_complete(subscriber.connect(
            args.server, args.port_notify))
        atexit_register_coroutine(subscriber.close)
        sub_clients[notifier_name] = subscriber

    # Broadcast receivers for log records and client control broadcasts.
    broadcast_clients = dict()
    for target in "log", "ccb":
        client = Receiver(target, [], report_disconnect)
        loop.run_until_complete(client.connect(
            args.server, args.port_broadcast))
        atexit_register_coroutine(client.close)
        broadcast_clients[target] = client

    # initialize main window
    main_window = MainWindow(
        args.server if server_name is None else server_name)
    smgr.register(main_window)
    mdi_area = MdiArea()
    mdi_area.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    mdi_area.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    main_window.setCentralWidget(mdi_area)

    # create UI components
    expmgr = experiments.ExperimentManager(main_window,
                                           sub_clients["datasets"],
                                           sub_clients["explist"],
                                           sub_clients["schedule"],
                                           rpc_clients["schedule"],
                                           rpc_clients["experiment_db"])
    smgr.register(expmgr)
    d_shortcuts = shortcuts.ShortcutsDock(main_window, expmgr)
    smgr.register(d_shortcuts)
    d_explorer = explorer.ExplorerDock(expmgr, d_shortcuts,
                                       sub_clients["explist"],
                                       sub_clients["explist_status"],
                                       rpc_clients["schedule"],
                                       rpc_clients["experiment_db"],
                                       rpc_clients["device_db"])
    smgr.register(d_explorer)

    d_datasets = datasets.DatasetsDock(sub_clients["datasets"],
                                       rpc_clients["dataset_db"])
    smgr.register(d_datasets)

    # extra_substitutes lets applet command lines reference the master's
    # connection parameters.
    d_applets = applets_ccb.AppletsCCBDock(main_window,
                                           sub_clients["datasets"],
                                           extra_substitutes={
                                               "server": args.server,
                                               "port_notify": args.port_notify,
                                               "port_control": args.port_control,
                                           })
    atexit_register_coroutine(d_applets.stop)
    smgr.register(d_applets)
    broadcast_clients["ccb"].notify_cbs.append(d_applets.ccb_notify)

    d_ttl_dds = moninj.MonInj(rpc_clients["schedule"])
    loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify))
    atexit_register_coroutine(d_ttl_dds.stop)

    d_schedule = schedule.ScheduleDock(
        rpc_clients["schedule"], sub_clients["schedule"])
    smgr.register(d_schedule)

    # GUI log docks receive both broadcast log records and local ones.
    logmgr = log.LogDockManager(main_window)
    smgr.register(logmgr)
    broadcast_clients["log"].notify_cbs.append(logmgr.append_message)
    widget_log_handler.callback = logmgr.append_message

    # lay out docks
    right_docks = [
        d_explorer, d_shortcuts,
        d_ttl_dds.ttl_dock, d_ttl_dds.dds_dock, d_ttl_dds.dac_dock,
        d_datasets, d_applets
    ]
    main_window.addDockWidget(QtCore.Qt.RightDockWidgetArea, right_docks[0])
    for d1, d2 in zip(right_docks, right_docks[1:]):
        main_window.tabifyDockWidget(d1, d2)
    main_window.addDockWidget(QtCore.Qt.BottomDockWidgetArea, d_schedule)

    # load/initialize state
    if os.name == "nt":
        # HACK: show the main window before creating applets.
        # Otherwise, the windows of those applets that are in detached
        # QDockWidgets fail to be embedded.
        main_window.show()
    smgr.load()
    smgr.start()
    atexit_register_coroutine(smgr.stop)

    # work around for https://github.com/m-labs/artiq/issues/1307
    d_ttl_dds.ttl_dock.show()
    d_ttl_dds.dds_dock.show()

    # create first log dock if not already in state
    d_log0 = logmgr.first_log_dock()
    if d_log0 is not None:
        main_window.tabifyDockWidget(d_schedule, d_log0)

    if server_name is not None:
        server_description = server_name + " ({})".format(args.server)
    else:
        server_description = args.server
    logging.info("ARTIQ dashboard version: %s", artiq_version)
    logging.info("ARTIQ dashboard connected to moninj_proxy (%s)",
                 server_description)

    # run
    main_window.show()
    loop.run_until_complete(main_window.exit_request.wait())
def __init__(self, smgr, datasets_sub, browse_root,
             master_host, master_port):
    """Build the ARTIQ Browser main window.

    smgr -- state manager; this window and its docks register with it so
        layout/contents persist across sessions.
    datasets_sub -- local dataset model manager shared by the docks.
    browse_root -- root directory for the file browser and experiments.
    master_host, master_port -- master address passed to the datasets
        dock (presumably for upload; confirm in DatasetsDock).
    """
    QtWidgets.QMainWindow.__init__(self)
    smgr.register(self)

    icon = QtGui.QIcon(os.path.join(artiq_dir, "gui", "logo.svg"))
    self.setWindowIcon(icon)
    self.setWindowTitle("ARTIQ Browser")

    # Default size expressed in font metrics so it scales with the font.
    qfm = QtGui.QFontMetrics(self.font())
    self.resize(140 * qfm.averageCharWidth(), 38 * qfm.lineSpacing())

    # Set by the Exit action/close event; main() waits on it.
    self.exit_request = asyncio.Event()

    self.setUnifiedTitleAndToolBarOnMac(True)

    # Central widget: the experiments MDI area.
    self.experiments = experiments.ExperimentsArea(browse_root,
                                                   datasets_sub)
    smgr.register(self.experiments)
    self.experiments.setHorizontalScrollBarPolicy(
        QtCore.Qt.ScrollBarAsNeeded)
    self.experiments.setVerticalScrollBarPolicy(
        QtCore.Qt.ScrollBarAsNeeded)
    self.setCentralWidget(self.experiments)

    # File browser dock; activating/changing a dataset notifies the
    # experiments area.
    self.files = files.FilesDock(datasets_sub, browse_root)
    smgr.register(self.files)
    self.files.dataset_activated.connect(
        self.experiments.dataset_activated)
    self.files.dataset_changed.connect(self.experiments.dataset_changed)

    self.applets = applets.AppletsDock(self, datasets_sub)
    smgr.register(self.applets)
    atexit_register_coroutine(self.applets.stop)

    self.datasets = datasets.DatasetsDock(datasets_sub, master_host,
                                          master_port)
    smgr.register(self.datasets)
    self.files.metadata_changed.connect(self.datasets.metadata_changed)

    # Log dock is movable/floatable but not closable.
    self.log = log.LogDock(None, "log")
    smgr.register(self.log)
    self.log.setFeatures(self.log.DockWidgetMovable |
                         self.log.DockWidgetFloatable)

    self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.files)
    self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.applets)
    self.addDockWidget(QtCore.Qt.RightDockWidgetArea, self.datasets)
    self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.log)

    # Experiment menu: open an experiment file.
    g = self.menuBar().addMenu("&Experiment")
    a = QtWidgets.QAction("&Open", self)
    a.setIcon(QtWidgets.QApplication.style().standardIcon(
        QtWidgets.QStyle.SP_DialogOpenButton))
    a.setShortcuts(QtGui.QKeySequence.Open)
    a.setStatusTip("Open an experiment")
    a.triggered.connect(self.experiments.select_experiment)
    g.addAction(a)

    # View menu: arrange experiment sub-windows.
    g = self.menuBar().addMenu("&View")
    a = QtWidgets.QAction("Cascade", self)
    a.setStatusTip("Cascade experiment windows")
    a.triggered.connect(self.experiments.cascadeSubWindows)
    g.addAction(a)
    a = QtWidgets.QAction("Tile", self)
    a.setStatusTip("Tile experiment windows")
    a.triggered.connect(self.experiments.tileSubWindows)
    g.addAction(a)
def main():
    """Entry point for the lab-customized ARTIQ dashboard.

    Like the stock dashboard, but additionally connects to two labrad
    managers (local and laser room) and wraps the standard dashboard in
    a tab widget alongside lab-specific tabs (PMT control, parameter
    editor, readout histograms, drift tracker, pulse sequence).
    """
    # connect to labrad
    acxn = connection()
    acxn.connect()
    acxn.add_on_connect("ParameterVault", parameter_vault_connect)
    acxn.add_on_disconnect("ParameterVault", parameter_vault_disconnect)

    # connect to laser room labrad
    # NOTE(review): "lase_room_password" looks like a typo of
    # "laser_room_password" -- confirm the module-level name it refers to.
    laser_room_acxn = connection()
    laser_room_acxn.connect(host=laser_room_ip_address,
                            password=lase_room_password, tls_mode="off")

    # initialize application
    args = get_argparser().parse_args()
    widget_log_handler = log.init_log(args, "dashboard")
    # Default state file is unique per master server/port.
    if args.db_file is None:
        args.db_file = os.path.join(
            get_user_config_dir(),
            "artiq_dashboard_{server}_{port}.pyon".format(
                server=args.server.replace(":", "."),
                port=args.port_notify))

    app = QtWidgets.QApplication(["ARTIQ Dashboard"])
    loop = QEventLoop(app)
    asyncio.set_event_loop(loop)
    atexit.register(loop.close)
    smgr = state.StateManager(args.db_file)

    # create connections to master
    rpc_clients = dict()
    for target in "schedule", "experiment_db", "dataset_db":
        client = AsyncioClient()
        loop.run_until_complete(client.connect_rpc(
            args.server, args.port_control, "master_" + target))
        atexit.register(client.close_rpc)
        rpc_clients[target] = client

    # One-shot synchronous query for the master's configured name.
    config = Client(args.server, args.port_control, "master_config")
    try:
        server_name = config.get_name()
    finally:
        config.close_rpc()

    disconnect_reported = False

    def report_disconnect():
        # Log the loss of the master connection only once.
        nonlocal disconnect_reported
        if not disconnect_reported:
            logging.error("connection to master lost, "
                          "restart dashboard to reconnect")
        disconnect_reported = True

    # Subscriber clients mirror the master's notifiers into local models.
    sub_clients = dict()
    for notifier_name, modelf in (("explist", explorer.Model),
                                  ("explist_status", explorer.StatusUpdater),
                                  ("datasets", datasets.Model),
                                  ("schedule", schedule.Model)):
        subscriber = ModelSubscriber(notifier_name, modelf,
                                     report_disconnect)
        loop.run_until_complete(subscriber.connect(
            args.server, args.port_notify))
        atexit_register_coroutine(subscriber.close)
        sub_clients[notifier_name] = subscriber

    # Broadcast receivers for log records and client control broadcasts.
    broadcast_clients = dict()
    for target in "log", "ccb":
        client = Receiver(target, [], report_disconnect)
        loop.run_until_complete(client.connect(
            args.server, args.port_broadcast))
        atexit_register_coroutine(client.close)
        broadcast_clients[target] = client

    # initialize main window
    # main_main_window holds the tab widget; main_window is the stock
    # dashboard window embedded as the "Control" tab.
    tabs = TabWidget()
    main_main_window = MainWindow(
        args.server if server_name is None else server_name)
    main_window = MainWindow(
        args.server if server_name is None else server_name)
    main_main_window.setCentralWidget(tabs)
    smgr.register(tabs)
    smgr.register(main_main_window)
    smgr.register(main_window, "sortoflikeamainwindowbutnotquite")
    mdi_area = MdiArea()
    mdi_area.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    mdi_area.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    main_window.setCentralWidget(mdi_area)

    # create UI components
    expmgr = experiments.ExperimentManager(main_window,
                                           sub_clients["explist"],
                                           sub_clients["schedule"],
                                           rpc_clients["schedule"],
                                           rpc_clients["experiment_db"])
    smgr.register(expmgr)
    # d_shortcuts = shortcuts.ShortcutsDock(main_window, expmgr)
    # smgr.register(d_shortcuts)
    d_pmt = pmt_control.PMTControlDock(acxn)
    smgr.register(d_pmt)
    d_parameter_editor = parameter_editor.ParameterEditorDock(acxn=acxn)
    smgr.register(d_parameter_editor)
    needs_parameter_vault.append(d_parameter_editor)
    # Shortcuts dock is disabled above, hence the None argument.
    d_explorer = explorer.ExplorerDock(expmgr, None,
                                       sub_clients["explist"],
                                       sub_clients["explist_status"],
                                       rpc_clients["schedule"],
                                       rpc_clients["experiment_db"])
    smgr.register(d_explorer)

    d_datasets = datasets.DatasetsDock(sub_clients["datasets"],
                                       rpc_clients["dataset_db"])
    smgr.register(d_datasets)

    d_applets = applets_ccb.AppletsCCBDock(main_window,
                                           sub_clients["datasets"])
    atexit_register_coroutine(d_applets.stop)
    smgr.register(d_applets)
    broadcast_clients["ccb"].notify_cbs.append(d_applets.ccb_notify)

    d_ttl_dds = moninj.MonInj()
    loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify))
    atexit_register_coroutine(d_ttl_dds.stop)

    d_schedule = schedule.ScheduleDock(
        rpc_clients["schedule"], sub_clients["schedule"])
    smgr.register(d_schedule)

    # GUI log docks receive both broadcast log records and local ones.
    logmgr = log.LogDockManager(main_window)
    smgr.register(logmgr)
    broadcast_clients["log"].notify_cbs.append(logmgr.append_message)
    widget_log_handler.callback = logmgr.append_message

    # lay out docks
    right_docks = [
        d_explorer, d_pmt, d_parameter_editor,
        d_ttl_dds.ttl_dock, #d_ttl_dds.dds_dock,
        d_ttl_dds.dac_dock,
        d_datasets, d_applets
    ]
    main_window.addDockWidget(QtCore.Qt.RightDockWidgetArea, right_docks[0])
    for d1, d2 in zip(right_docks, right_docks[1:]):
        main_window.tabifyDockWidget(d1, d2)
    main_window.addDockWidget(QtCore.Qt.BottomDockWidgetArea, d_schedule)
    tabs.addTab(main_window, "Control")

    laser_room_tab = LaserRoomTab()
    smgr.register(laser_room_tab)
    tabs.addTab(laser_room_tab, "Laser Room")

    # temp_controller_tab = TempControllerTab()
    # smgr.register(temp_controller_tab)
    # tabs.addTab(temp_controller_tab, "Temperature Controller")

    histograms_tab = ReadoutHistograms(acxn, smgr)
    smgr.register(histograms_tab)
    needs_parameter_vault.append(histograms_tab)
    tabs.addTab(histograms_tab, "Readout")

    drift_tracker_tab = DriftTracker(laser_room_acxn)
    smgr.register(drift_tracker_tab)
    tabs.addTab(drift_tracker_tab, "Drift Tracker")

    pulse_sequence_tab = PulseSequenceTab()
    smgr.register(pulse_sequence_tab)
    tabs.addTab(pulse_sequence_tab, "Pulse Sequence")

    smgr.load()
    smgr.start()
    atexit_register_coroutine(smgr.stop)

    # load/initialize state
    if os.name == "nt":
        # HACK: show the main window before creating applets.
        # Otherwise, the windows of those applets that are in detached
        # QDockWidgets fail to be embedded.
        main_window.show()

    # work around for https://github.com/m-labs/artiq/issues/1307
    d_ttl_dds.ttl_dock.show()
    d_ttl_dds.dds_dock.show()

    # create first log dock if not already in state
    d_log0 = logmgr.first_log_dock()
    if d_log0 is not None:
        main_window.tabifyDockWidget(d_schedule, d_log0)

    if server_name is not None:
        server_description = server_name + " ({})".format(args.server)
    else:
        server_description = args.server
    logging.info("ARTIQ dashboard %s connected to %s",
                 artiq_version, server_description)

    main_main_window.show()
    loop.run_until_complete(main_main_window.exit_request.wait())