Example #1
def main():
    # initialize application
    args = get_argparser().parse_args()
    widget_log_handler = log.init_log(args, "browser")

    app = QtWidgets.QApplication(["ARTIQ Browser"])
    loop = QEventLoop(app)
    asyncio.set_event_loop(loop)
    atexit.register(loop.close)

    datasets_sub = models.LocalModelManager(datasets.Model)
    datasets_sub.init({})

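    # persist the GUI state of registered objects (window/dock layout) to args.db_file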
    smgr = state.StateManager(args.db_file)

    browser = Browser(smgr, datasets_sub, args.browse_root,
                      args.server, args.port)
    widget_log_handler.callback = browser.log.append_message

    if os.name == "nt":
        # HACK: show the main window before creating applets.
        # Otherwise, the windows of those applets that are in detached
        # QDockWidgets fail to be embedded.
        browser.show()
    smgr.load()
    smgr.start()
    atexit_register_coroutine(smgr.stop)

    if args.select is not None:
        browser.files.select(args.select)

    browser.show()
    loop.run_until_complete(browser.exit_request.wait())
Example #2
    def start(self):
        """ Start the server """

        self.executor = ThreadPoolExecutor(max_workers=2)
        self.loop = loop = asyncio.get_event_loop()
        atexit.register(loop.close)

        # start control server
        bind = bind_address_from_args(self.args)
        loop.run_until_complete(
            self.server_control.start(bind, self.args.port_control))
        atexit_register_coroutine(self.server_control.stop)

        # start notify server
        loop.run_until_complete(
            self.server_notify.start(bind, self.args.port_notify))
        atexit_register_coroutine(self.server_notify.stop)

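        # launch the measurement loop and one lock task per laser in the background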
        asyncio.ensure_future(self.measurement_task())

        for laser in self.lasers:
            asyncio.ensure_future(self.lock_task(laser))

        # backup of configuration file
        backup_config(self.args, "_server")
        asyncio.ensure_future(regular_config_backup(self.args, "_server"))
        atexit.register(backup_config, self.args, "_server")

        logger.info("server started")
        self.running = True
        loop.run_forever()
Example #3
    def __init__(self, datasets_sub, browse_root, select):
        QtWidgets.QMainWindow.__init__(self)

        icon = QtGui.QIcon(os.path.join(artiq_dir, "gui", "logo.svg"))
        self.setWindowIcon(icon)
        self.setWindowTitle("ARTIQ Browser")

        qfm = QtGui.QFontMetrics(self.font())
        self.resize(140 * qfm.averageCharWidth(), 38 * qfm.lineSpacing())

        self.exit_request = asyncio.Event()

        self.setUnifiedTitleAndToolBarOnMac(True)

        self.experiments = experiments.ExperimentsArea(browse_root,
                                                       datasets_sub)
        self.experiments.setHorizontalScrollBarPolicy(
            QtCore.Qt.ScrollBarAsNeeded)
        self.experiments.setVerticalScrollBarPolicy(
            QtCore.Qt.ScrollBarAsNeeded)
        self.setCentralWidget(self.experiments)

        self.files = files.FilesDock(datasets_sub, browse_root, select=select)

        self.applets = applets.AppletsDock(self, datasets_sub)
        atexit_register_coroutine(self.applets.stop)

        self.datasets = datasets.DatasetsDock(datasets_sub)

        self.log = log.LogDock(None, "log")
        self.log.setFeatures(self.log.DockWidgetMovable
                             | self.log.DockWidgetFloatable)

        self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.files)
        self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.applets)
        self.addDockWidget(QtCore.Qt.RightDockWidgetArea, self.datasets)
        self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.log)

        g = self.menuBar().addMenu("&Experiment")
        a = QtWidgets.QAction("&Open", self)
        a.setIcon(QtWidgets.QApplication.style().standardIcon(
            QtWidgets.QStyle.SP_DialogOpenButton))
        a.setShortcuts(QtGui.QKeySequence.Open)
        a.setStatusTip("Open an experiment")
        a.triggered.connect(self.experiments.select_experiment)
        g.addAction(a)

        g = self.menuBar().addMenu("&View")
        a = QtWidgets.QAction("Cascade", self)
        a.setStatusTip("Cascade experiment windows")
        a.triggered.connect(self.experiments.cascadeSubWindows)
        g.addAction(a)
        a = QtWidgets.QAction("Tile", self)
        a.setStatusTip("Tile experiment windows")
        a.triggered.connect(self.experiments.tileSubWindows)
        g.addAction(a)
Example #4
def main():
    # initialize application
    args = get_argparser().parse_args()
    init_logger(args)

    app = QtWidgets.QApplication([])
    loop = QEventLoop(app)
    asyncio.set_event_loop(loop)
    atexit.register(loop.close)
    smgr = state.StateManager(args.db_file)

    datasets_sub = models.LocalModelManager(datasets.Model)

    # initialize main window
    main_window = MainWindow()
    smgr.register(main_window)
    status_bar = QtWidgets.QStatusBar()
    main_window.setStatusBar(status_bar)
    mdi_area = MdiArea()
    mdi_area.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    mdi_area.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    main_window.setCentralWidget(mdi_area)

    d_results = results.ResultsBrowser(datasets_sub)
    smgr.register(d_results)

    d_applets = applets.AppletsDock(main_window, datasets_sub)
    atexit_register_coroutine(d_applets.stop)
    smgr.register(d_applets)

    d_datasets = datasets.DatasetsDock(datasets_sub)
    smgr.register(d_datasets)

    main_window.setCentralWidget(d_results)
    main_window.addDockWidget(QtCore.Qt.BottomDockWidgetArea, d_applets)
    main_window.addDockWidget(QtCore.Qt.RightDockWidgetArea, d_datasets)

    # load/initialize state
    if os.name == "nt":
        # HACK: show the main window before creating applets.
        # Otherwise, the windows of those applets that are in detached
        # QDockWidgets fail to be embedded.
        main_window.show()

    smgr.load()

    if args.PATH:
        d_results.select(args.PATH)

    smgr.start()
    atexit_register_coroutine(smgr.stop)

    # run
    main_window.show()
    loop.run_until_complete(main_window.exit_request.wait())
Example #5
def main():
    args = get_argparser().parse_args()
    init_logger(args)

    loop = asyncio.get_event_loop()
    atexit.register(loop.close)

    writer = DBWriter(args.baseurl_db, args.user_db, args.password_db,
                      args.database, args.table)
    writer.start()
    atexit_register_coroutine(writer.stop)

    log = Log(writer)

    server = Logger()
    rpc_server = Server({"schedule_logger": server}, builtin_terminate=True)
    loop.run_until_complete(
        rpc_server.start(bind_address_from_args(args), args.port_control))
    atexit_register_coroutine(rpc_server.stop)

    reader = MasterReader(args.server_master, args.port_master,
                          args.retry_master, log)
    reader.start()
    atexit_register_coroutine(reader.stop)

    loop.run_until_complete(rpc_server.wait_terminate())
Example #6
def main():
    args = get_argparser().parse_args()
    init_logger(args)

    loop = asyncio.get_event_loop()
    atexit.register(loop.close)

    writer = DBWriter(args.baseurl_db,
                      args.user_db, args.password_db,
                      args.database, args.table)
    writer.start()
    atexit_register_coroutine(writer.stop)

    filter = Filter(args.pattern_file)
    rpc_server = Server({"influxdb_filter": filter}, builtin_terminate=True)
    loop.run_until_complete(rpc_server.start(bind_address_from_args(args),
                                             args.port_control))
    atexit_register_coroutine(rpc_server.stop)

    reader = MasterReader(args.server_master, args.port_master,
                          args.retry_master, filter._filter, writer)
    reader.start()
    atexit_register_coroutine(reader.stop)

    loop.run_until_complete(rpc_server.wait_terminate())
Example #7
def run_simple_rpc_server(port, setup_args, interface_name, setup_interface):
    parser = ArgumentParser()

    influxdb_args(parser)
    simple_network_args(parser, port)
    verbosity_args(parser)
    if setup_args:
        setup_args(parser)

    args = parser.parse_args()
    init_logger(args)

    loop = asyncio.get_event_loop()
    atexit.register(loop.close)

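    # optional InfluxDB pusher, run as a background task and cancelled at exit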
    influx_pusher = influxdb_pusher_from_args(args)
    if influx_pusher:
        t = asyncio.ensure_future(influx_pusher.run())
        def stop():
            t.cancel()
            try:
                loop.run_until_complete(t)
            except asyncio.CancelledError:
                pass
        atexit.register(stop)

    interface = setup_interface(args, influx_pusher, loop)

    # Provide a default ping() method, which ARTIQ calls regularly for
    # heartbeating purposes.
    if not hasattr(interface, "ping"):
        setattr(interface, "ping", lambda: True)

    rpc_server = Server({interface_name: interface}, builtin_terminate=True)
    loop.run_until_complete(rpc_server.start(bind_address_from_args(args), args.port))
    atexit_register_coroutine(rpc_server.stop)

    loop.run_until_complete(rpc_server.wait_terminate())
Example #8
def main():
    args = get_argparser().parse_args()

    root_logger = logging.getLogger()
    root_logger.setLevel(logging.NOTSET)
    source_adder = SourceFilter(logging.WARNING +
                                args.quiet*10 - args.verbose*10,
                                "ctlmgr({})".format(platform.node()))
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(logging.Formatter(
        "%(levelname)s:%(source)s:%(name)s:%(message)s"))
    console_handler.addFilter(source_adder)
    root_logger.addHandler(console_handler)

    if os.name == "nt":
        loop = asyncio.ProactorEventLoop()
        asyncio.set_event_loop(loop)
    else:
        loop = asyncio.get_event_loop()
    atexit.register(loop.close)

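    # forward local log records to the master's logging port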
    logfwd = LogForwarder(args.server, args.port_logging,
                          args.retry_master)
    logfwd.addFilter(source_adder)
    root_logger.addHandler(logfwd)
    logfwd.start()
    atexit_register_coroutine(logfwd.stop)

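    # start and stop controllers according to the master's device database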
    ctlmgr = ControllerManager(args.server, args.port_notify,
                               args.retry_master)
    ctlmgr.start()
    atexit_register_coroutine(ctlmgr.stop)

    class CtlMgrRPC:
        retry_now = ctlmgr.retry_now

    rpc_target = CtlMgrRPC()
    rpc_server = Server({"ctlmgr": rpc_target}, builtin_terminate=True)
    loop.run_until_complete(rpc_server.start(bind_address_from_args(args),
                                             args.port_control))
    atexit_register_coroutine(rpc_server.stop)

    loop.run_until_complete(rpc_server.wait_terminate())
Example #9
def main():
    args = get_argparser().parse_args()

    root_logger = logging.getLogger()
    root_logger.setLevel(logging.NOTSET)
    source_adder = SourceFilter(
        logging.WARNING + args.quiet * 10 - args.verbose * 10,
        "ctlmgr({})".format(platform.node()))
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(
        logging.Formatter("%(levelname)s:%(source)s:%(name)s:%(message)s"))
    console_handler.addFilter(source_adder)
    root_logger.addHandler(console_handler)

    if os.name == "nt":
        loop = asyncio.ProactorEventLoop()
        asyncio.set_event_loop(loop)
    else:
        loop = asyncio.get_event_loop()
    atexit.register(loop.close)

    logfwd = LogForwarder(args.server, args.port_logging, args.retry_master)
    logfwd.addFilter(source_adder)
    root_logger.addHandler(logfwd)
    logfwd.start()
    atexit_register_coroutine(logfwd.stop)

    ctlmgr = ControllerManager(args.server, args.port_notify,
                               args.retry_master)
    ctlmgr.start()
    atexit_register_coroutine(ctlmgr.stop)

    class CtlMgrRPC:
        retry_now = ctlmgr.retry_now

    rpc_target = CtlMgrRPC()
    rpc_server = Server({"ctlmgr": rpc_target}, builtin_terminate=True)
    loop.run_until_complete(
        rpc_server.start(bind_address_from_args(args), args.port_control))
    atexit_register_coroutine(rpc_server.stop)

    loop.run_until_complete(rpc_server.wait_terminate())
Example #10
    def __init__(self, smgr, datasets_sub, browse_root,
                 master_host, master_port):
        QtWidgets.QMainWindow.__init__(self)
        smgr.register(self)

        icon = QtGui.QIcon(os.path.join(artiq_dir, "gui", "logo.svg"))
        self.setWindowIcon(icon)
        self.setWindowTitle("ARTIQ Browser")

        qfm = QtGui.QFontMetrics(self.font())
        self.resize(140*qfm.averageCharWidth(), 38*qfm.lineSpacing())

        self.exit_request = asyncio.Event()

        self.setUnifiedTitleAndToolBarOnMac(True)

        self.experiments = experiments.ExperimentsArea(
            browse_root, datasets_sub)
        smgr.register(self.experiments)
        self.experiments.setHorizontalScrollBarPolicy(
            QtCore.Qt.ScrollBarAsNeeded)
        self.experiments.setVerticalScrollBarPolicy(
            QtCore.Qt.ScrollBarAsNeeded)
        self.setCentralWidget(self.experiments)

        self.files = files.FilesDock(datasets_sub, browse_root)
        smgr.register(self.files)

        self.files.dataset_activated.connect(
            self.experiments.dataset_activated)
        self.files.dataset_changed.connect(
            self.experiments.dataset_changed)

        self.applets = applets.AppletsDock(self, datasets_sub)
        smgr.register(self.applets)
        atexit_register_coroutine(self.applets.stop)

        self.datasets = datasets.DatasetsDock(
            datasets_sub, master_host, master_port)
        smgr.register(self.datasets)
        self.files.metadata_changed.connect(self.datasets.metadata_changed)

        self.log = log.LogDock(None, "log")
        smgr.register(self.log)
        self.log.setFeatures(self.log.DockWidgetMovable |
                             self.log.DockWidgetFloatable)

        self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.files)
        self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.applets)
        self.addDockWidget(QtCore.Qt.RightDockWidgetArea, self.datasets)
        self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.log)

        g = self.menuBar().addMenu("&Experiment")
        a = QtWidgets.QAction("&Open", self)
        a.setIcon(QtWidgets.QApplication.style().standardIcon(
            QtWidgets.QStyle.SP_DialogOpenButton))
        a.setShortcuts(QtGui.QKeySequence.Open)
        a.setStatusTip("Open an experiment")
        a.triggered.connect(self.experiments.select_experiment)
        g.addAction(a)

        g = self.menuBar().addMenu("&View")
        a = QtWidgets.QAction("Cascade", self)
        a.setStatusTip("Cascade experiment windows")
        a.triggered.connect(self.experiments.cascadeSubWindows)
        g.addAction(a)
        a = QtWidgets.QAction("Tile", self)
        a.setStatusTip("Tile experiment windows")
        a.triggered.connect(self.experiments.tileSubWindows)
        g.addAction(a)
Example #11
def main():
    args = get_argparser().parse_args()
    log_buffer = init_log(args)
    if os.name == "nt":
        loop = asyncio.ProactorEventLoop()
        asyncio.set_event_loop(loop)
    else:
        loop = asyncio.get_event_loop()
    atexit.register(loop.close)

    device_db = DeviceDB(args.device_db)
    dataset_db = DatasetDB(args.dataset_db)
    dataset_db.start()
    atexit_register_coroutine(dataset_db.stop)
    worker_handlers = dict()

    if args.git:
        repo_backend = GitBackend(args.repository)
    else:
        repo_backend = FilesystemBackend(args.repository)
    experiment_db = ExperimentDB(repo_backend, worker_handlers)
    atexit.register(experiment_db.close)

    scheduler = Scheduler(RIDCounter(), worker_handlers, experiment_db)
    scheduler.start()
    atexit_register_coroutine(scheduler.stop)

    worker_handlers.update({
        "get_device_db": device_db.get_device_db,
        "get_device": device_db.get,
        "get_dataset": dataset_db.get,
        "update_dataset": dataset_db.update,
        "scheduler_submit": scheduler.submit,
        "scheduler_delete": scheduler.delete,
        "scheduler_request_termination": scheduler.request_termination,
        "scheduler_get_status": scheduler.get_status        
    })
    experiment_db.scan_repository_async()

    bind = bind_address_from_args(args)

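    # RPC server exposing the device/dataset databases, scheduler and experiment DB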
    server_control = RPCServer({
        "master_device_db": device_db,
        "master_dataset_db": dataset_db,
        "master_schedule": scheduler,
        "master_experiment_db": experiment_db
    }, allow_parallel=True)
    loop.run_until_complete(server_control.start(
        bind, args.port_control))
    atexit_register_coroutine(server_control.stop)

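    # publish live notifications: schedule, devices, datasets, experiment list and log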
    server_notify = Publisher({
        "schedule": scheduler.notifier,
        "devices": device_db.data,
        "datasets": dataset_db.data,
        "explist": experiment_db.explist,
        "explist_status": experiment_db.status,
        "log": log_buffer.data
    })
    loop.run_until_complete(server_notify.start(
        bind, args.port_notify))
    atexit_register_coroutine(server_notify.stop)

    server_logging = LoggingServer()
    loop.run_until_complete(server_logging.start(
        bind, args.port_logging))
    atexit_register_coroutine(server_logging.stop)

    logger.info("running, bound to %s", bind)
    loop.run_forever()
Example #12
def main():
    # initialize application
    args = get_argparser().parse_args()
    widget_log_handler = log.init_log(args, "dashboard")

    app = QtWidgets.QApplication(["ARTIQ Dashboard"])
    loop = QEventLoop(app)
    asyncio.set_event_loop(loop)
    atexit.register(loop.close)
    smgr = state.StateManager(args.db_file)

    # create connections to master
    rpc_clients = dict()
    for target in "schedule", "experiment_db", "dataset_db":
        client = AsyncioClient()
        loop.run_until_complete(client.connect_rpc(
            args.server, args.port_control, "master_" + target))
        atexit.register(client.close_rpc)
        rpc_clients[target] = client

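    # subscribe to master notifications and mirror them into local Qt models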
    sub_clients = dict()
    for notifier_name, modelf in (
        ("explist", explorer.Model),
        ("explist_status", explorer.StatusUpdater),
        ("datasets", datasets.Model),
        ("schedule", schedule.Model),
    ):
        subscriber = ModelSubscriber(notifier_name, modelf)
        loop.run_until_complete(subscriber.connect(args.server, args.port_notify))
        atexit_register_coroutine(subscriber.close)
        sub_clients[notifier_name] = subscriber

    log_receiver = Receiver("log", [])
    loop.run_until_complete(log_receiver.connect(args.server, args.port_broadcast))
    atexit_register_coroutine(log_receiver.close)

    # initialize main window
    main_window = MainWindow(args.server)
    smgr.register(main_window)
    mdi_area = MdiArea()
    mdi_area.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    mdi_area.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    main_window.setCentralWidget(mdi_area)

    # create UI components
    expmgr = experiments.ExperimentManager(
        main_window,
        sub_clients["explist"],
        sub_clients["schedule"],
        rpc_clients["schedule"],
        rpc_clients["experiment_db"],
    )
    smgr.register(expmgr)
    d_shortcuts = shortcuts.ShortcutsDock(main_window, expmgr)
    smgr.register(d_shortcuts)
    d_explorer = explorer.ExplorerDock(
        expmgr,
        d_shortcuts,
        sub_clients["explist"],
        sub_clients["explist_status"],
        rpc_clients["schedule"],
        rpc_clients["experiment_db"],
    )
    smgr.register(d_explorer)

    d_datasets = datasets.DatasetsDock(sub_clients["datasets"], rpc_clients["dataset_db"])
    smgr.register(d_datasets)

    d_applets = applets.AppletsDock(main_window, sub_clients["datasets"])
    atexit_register_coroutine(d_applets.stop)
    smgr.register(d_applets)

    d_ttl_dds = moninj.MonInj()
    loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify))
    atexit_register_coroutine(d_ttl_dds.stop)

    d_schedule = schedule.ScheduleDock(rpc_clients["schedule"], sub_clients["schedule"])
    smgr.register(d_schedule)

    logmgr = log.LogDockManager(main_window)
    smgr.register(logmgr)
    log_receiver.notify_cbs.append(logmgr.append_message)
    widget_log_handler.callback = logmgr.append_message

    # lay out docks
    right_docks = [d_explorer, d_shortcuts, d_ttl_dds.ttl_dock,
                   d_ttl_dds.dds_dock, d_datasets, d_applets]
    main_window.addDockWidget(QtCore.Qt.RightDockWidgetArea, right_docks[0])
    for d1, d2 in zip(right_docks, right_docks[1:]):
        main_window.tabifyDockWidget(d1, d2)
    main_window.addDockWidget(QtCore.Qt.BottomDockWidgetArea, d_schedule)

    # load/initialize state
    if os.name == "nt":
        # HACK: show the main window before creating applets.
        # Otherwise, the windows of those applets that are in detached
        # QDockWidgets fail to be embedded.
        main_window.show()
    smgr.load()
    smgr.start()
    atexit_register_coroutine(smgr.stop)

    # create first log dock if not already in state
    d_log0 = logmgr.first_log_dock()
    if d_log0 is not None:
        main_window.tabifyDockWidget(d_schedule, d_log0)

    logging.info("ARTIQ dashboard %s connected to %s", artiq_version, args.server)

    # run
    main_window.show()
    loop.run_until_complete(main_window.exit_request.wait())
Example #13
def main():
    # initialize application
    args = get_argparser().parse_args()
    widget_log_handler = log.init_log(args, "dashboard")

    if args.db_file is None:
        args.db_file = os.path.join(
            get_user_config_dir(),
            "artiq_dashboard_{server}_{port}.pyon".format(
                server=args.server.replace(":", "."), port=args.port_notify))

    app = QtWidgets.QApplication(["ARTIQ Dashboard"])
    loop = QEventLoop(app)
    asyncio.set_event_loop(loop)
    atexit.register(loop.close)
    smgr = state.StateManager(args.db_file)

    # create connections to master
    rpc_clients = dict()
    for target in "schedule", "experiment_db", "dataset_db":
        client = AsyncioClient()
        loop.run_until_complete(
            client.connect_rpc(args.server, args.port_control,
                               "master_" + target))
        atexit.register(client.close_rpc)
        rpc_clients[target] = client

    config = Client(args.server, args.port_control, "master_config")
    try:
        server_name = config.get_name()
    finally:
        config.close_rpc()

    disconnect_reported = False

    def report_disconnect():
        nonlocal disconnect_reported
        if not disconnect_reported:
            logging.error("connection to master lost, "
                          "restart dashboard to reconnect")
        disconnect_reported = True

    sub_clients = dict()
    for notifier_name, modelf in (("explist", explorer.Model),
                                  ("explist_status", explorer.StatusUpdater),
                                  ("datasets", datasets.Model),
                                  ("schedule", schedule.Model)):
        subscriber = ModelSubscriber(notifier_name, modelf, report_disconnect)
        loop.run_until_complete(
            subscriber.connect(args.server, args.port_notify))
        atexit_register_coroutine(subscriber.close)
        sub_clients[notifier_name] = subscriber

    broadcast_clients = dict()
    for target in "log", "ccb":
        client = Receiver(target, [], report_disconnect)
        loop.run_until_complete(
            client.connect(args.server, args.port_broadcast))
        atexit_register_coroutine(client.close)
        broadcast_clients[target] = client

    # initialize main window
    main_window = MainWindow(
        args.server if server_name is None else server_name)
    smgr.register(main_window)
    mdi_area = MdiArea()
    mdi_area.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    mdi_area.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    main_window.setCentralWidget(mdi_area)

    # create UI components
    expmgr = experiments.ExperimentManager(main_window, sub_clients["explist"],
                                           sub_clients["schedule"],
                                           rpc_clients["schedule"],
                                           rpc_clients["experiment_db"])
    smgr.register(expmgr)
    d_shortcuts = shortcuts.ShortcutsDock(main_window, expmgr)
    smgr.register(d_shortcuts)
    d_explorer = explorer.ExplorerDock(expmgr, d_shortcuts,
                                       sub_clients["explist"],
                                       sub_clients["explist_status"],
                                       rpc_clients["schedule"],
                                       rpc_clients["experiment_db"])
    smgr.register(d_explorer)

    d_datasets = datasets.DatasetsDock(sub_clients["datasets"],
                                       rpc_clients["dataset_db"])
    smgr.register(d_datasets)

    d_applets = applets_ccb.AppletsCCBDock(main_window,
                                           sub_clients["datasets"])
    atexit_register_coroutine(d_applets.stop)
    smgr.register(d_applets)
    broadcast_clients["ccb"].notify_cbs.append(d_applets.ccb_notify)

    d_ttl_dds = moninj.MonInj()
    loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify))
    atexit_register_coroutine(d_ttl_dds.stop)

    d_schedule = schedule.ScheduleDock(rpc_clients["schedule"],
                                       sub_clients["schedule"])
    smgr.register(d_schedule)

    logmgr = log.LogDockManager(main_window)
    smgr.register(logmgr)
    broadcast_clients["log"].notify_cbs.append(logmgr.append_message)
    widget_log_handler.callback = logmgr.append_message

    # lay out docks
    right_docks = [
        d_explorer, d_shortcuts, d_ttl_dds.ttl_dock, d_ttl_dds.dds_dock,
        d_ttl_dds.dac_dock, d_datasets, d_applets
    ]
    main_window.addDockWidget(QtCore.Qt.RightDockWidgetArea, right_docks[0])
    for d1, d2 in zip(right_docks, right_docks[1:]):
        main_window.tabifyDockWidget(d1, d2)
    main_window.addDockWidget(QtCore.Qt.BottomDockWidgetArea, d_schedule)

    # load/initialize state
    if os.name == "nt":
        # HACK: show the main window before creating applets.
        # Otherwise, the windows of those applets that are in detached
        # QDockWidgets fail to be embedded.
        main_window.show()
    smgr.load()
    smgr.start()
    atexit_register_coroutine(smgr.stop)

    # create first log dock if not already in state
    d_log0 = logmgr.first_log_dock()
    if d_log0 is not None:
        main_window.tabifyDockWidget(d_schedule, d_log0)

    if server_name is not None:
        server_description = server_name + " ({})".format(args.server)
    else:
        server_description = args.server
    logging.info("ARTIQ dashboard %s connected to %s", artiq_version,
                 server_description)

    # run
    main_window.show()
    loop.run_until_complete(main_window.exit_request.wait())
Example #14
def main():
    args = get_argparser().parse_args()
    log_buffer = init_log(args)
    if os.name == "nt":
        loop = asyncio.ProactorEventLoop()
        asyncio.set_event_loop(loop)
    else:
        loop = asyncio.get_event_loop()
    atexit.register(loop.close)

    device_db = DeviceDB(args.device_db)
    dataset_db = DatasetDB(args.dataset_db)
    dataset_db.start()
    atexit_register_coroutine(dataset_db.stop)

    if args.git:
        repo_backend = GitBackend(args.repository)
    else:
        repo_backend = FilesystemBackend(args.repository)
    repository = Repository(repo_backend, device_db.get_device_db,
                            log_worker)
    atexit.register(repository.close)
    repository.scan_async()

    worker_handlers = {
        "get_device_db": device_db.get_device_db,
        "get_device": device_db.get,
        "get_dataset": dataset_db.get,
        "update_dataset": dataset_db.update,
        "log": log_worker
    }
    scheduler = Scheduler(get_last_rid() + 1, worker_handlers, repo_backend)
    worker_handlers.update({
        "scheduler_submit": scheduler.submit,
        "scheduler_delete": scheduler.delete,
        "scheduler_request_termination": scheduler.request_termination,
        "scheduler_get_status": scheduler.get_status
    })
    scheduler.start()
    atexit_register_coroutine(scheduler.stop)

    server_control = RPCServer({
        "master_device_db": device_db,
        "master_dataset_db": dataset_db,
        "master_schedule": scheduler,
        "master_repository": repository
    })
    loop.run_until_complete(server_control.start(
        args.bind, args.port_control))
    atexit_register_coroutine(server_control.stop)

    server_notify = Publisher({
        "schedule": scheduler.notifier,
        "devices": device_db.data,
        "datasets": dataset_db.data,
        "explist": repository.explist,
        "log": log_buffer.data
    })
    loop.run_until_complete(server_notify.start(
        args.bind, args.port_notify))
    atexit_register_coroutine(server_notify.stop)

    server_logging = LoggingServer()
    loop.run_until_complete(server_logging.start(
        args.bind, args.port_logging))
    atexit_register_coroutine(server_logging.stop)

    loop.run_forever()
Example #15
def setup_interface(args, influx_pusher, loop):
    i2c = I2CInterface(args.i2c_bus_idx, args.i2c_dev_addr)

    channels = dict()

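    # register a ChunkedChannel whose completed bins are pushed to InfluxDB (if configured)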
    def add(name, ty) -> None:
        def bin_finished(values):
            if influx_pusher:
                influx_pusher.push(name, aggregate_stats_default(values))

        channels[ty] = ChunkedChannel(name, bin_finished, 256, 30, loop)

    # TODO: Better names once deduced what these actually are.
    add("imon_up", StateType.imon_up)
    add("imon_2", StateType.imon_2)
    add("umon_up", StateType.umon_up)
    add("ntc_2", StateType.ntc_2)
    add("ntc_1", StateType.ntc_1)
    add("imon_1", StateType.imon_1)
    add("u_48v", StateType.u_48v)
    add("u_24v", StateType.u_24v)
    add("ratio", StateType.ratio)

    callbacks = {}
    for ty, chunker in channels.items():
        callbacks[ty] = lambda x, chunker=chunker: chunker.push(x)
    callbacks[StateType.status_flags] = lambda x: logger.warning(
        "Status flags changed: %s", x)
    poller = Poller(i2c, callbacks)
    atexit_register_coroutine(poller.stop)

    class Interface:
        def __init__(self):
            # It seems we cannot easily determine whether the high-voltage
            # stage is currently enabled, as the hardware only sends the
            # status flag field when it changes. Thus, we always perform the
            # soft-start wait the first time after startup (signified by None).
            self._set_point_volts = None

        async def get_voltage(self):
            """
            Return the last output voltage programmed.

            This is the value as stored by the controller; there is no hardware
            readback, so None is returned when the voltage has not been
            programmed yet.
            """
            return self._set_point_volts

        async def set_voltage(self, set_point_volts):
            """
            Set the output voltage, in volts.

            Even with the set point at zero, the hardware will output a small
            residual voltage (likely an artifact of imperfect calibration
            between the two bipolar stages used by the power supply). To
            completely extinguish the signal, as well as for safety reasons,
            set point 0.0 will also completely disable the output stage.

            :return: `False` if the setpoint is known to be the same as the
                currently programmed one; `True` otherwise.
            """

            if set_point_volts < 0.0:
                raise ValueError("Output voltage cannot be negative")
            if set_point_volts > args.voltage_factor:
                raise ValueError("Output voltage cannot exceed {} V".format(
                    args.voltage_factor))

            first = self._set_point_volts is None
            if set_point_volts > 0.0 and (first
                                          or self._set_point_volts == 0.0):
                await poller.enable_hv(True)

                # The hardware implements some sort of soft-start mechanism,
                # only enabling the output after about one second.
                await sleep(2.0)

            await poller.set_hv_set_point(
                set_point_volts / args.voltage_factor)

            if set_point_volts == 0.0 and (first
                                           or self._set_point_volts > 0.0):
                # Give the hardware some time to ramp down the voltage, as
                # vaguely suggested by the manufacturer's recommended shutdown
                # procedures (which assume that you control the power supply
                # using the chunky 10-turn knob on the front panel).
                await sleep(2.0)
                await poller.enable_hv(False)

            previous_volts = self._set_point_volts
            self._set_point_volts = set_point_volts

            return first or not math.isclose(previous_volts, set_point_volts)

        async def reset_fault(self):
            """
            Reset the hardware fault detection circuitry after a fault has
            occurred (e.g. over-current/-temperature), allowing the output
            to be enabled again.

            Also disables the output (which, in case the hardware is actually
            in a failure state, would have already occurred).
            """
            await self.set_voltage(0.0)
            await poller.reset_fault()

    rpc_interface = Interface()
    for c in channels.values():
        add_chunker_methods(rpc_interface, c)
    return rpc_interface
Example #16
def main():
    args = get_argparser().parse_args()
    log_forwarder = init_log(args)
    if os.name == "nt":
        loop = asyncio.ProactorEventLoop()
        asyncio.set_event_loop(loop)
    else:
        loop = asyncio.get_event_loop()
    atexit.register(loop.close)
    bind = bind_address_from_args(args)

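    # broadcast server carrying log messages and client control broadcasts (CCBs)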
    server_broadcast = Broadcaster()
    loop.run_until_complete(server_broadcast.start(
        bind, args.port_broadcast))
    atexit_register_coroutine(server_broadcast.stop)

    log_forwarder.callback = (lambda msg:
        server_broadcast.broadcast("log", msg))
    def ccb_issue(service, *args, **kwargs):
        msg = {
            "service": service,
            "args": args,
            "kwargs": kwargs
        }
        server_broadcast.broadcast("ccb", msg)

    device_db = DeviceDB(args.device_db)
    dataset_db = DatasetDB(args.dataset_db)
    dataset_db.start()
    atexit_register_coroutine(dataset_db.stop)
    worker_handlers = dict()

    if args.git:
        repo_backend = GitBackend(args.repository)
    else:
        repo_backend = FilesystemBackend(args.repository)
    experiment_db = ExperimentDB(repo_backend, worker_handlers)
    atexit.register(experiment_db.close)

    scheduler = Scheduler(RIDCounter(), worker_handlers, experiment_db)
    scheduler.start()
    atexit_register_coroutine(scheduler.stop)

    worker_handlers.update({
        "get_device_db": device_db.get_device_db,
        "get_device": device_db.get,
        "get_dataset": dataset_db.get,
        "update_dataset": dataset_db.update,
        "scheduler_submit": scheduler.submit,
        "scheduler_delete": scheduler.delete,
        "scheduler_request_termination": scheduler.request_termination,
        "scheduler_get_status": scheduler.get_status,
        "scheduler_check_pause": scheduler.check_pause,
        "ccb_issue": ccb_issue,
    })
    experiment_db.scan_repository_async()

    server_control = RPCServer({
        "master_device_db": device_db,
        "master_dataset_db": dataset_db,
        "master_schedule": scheduler,
        "master_experiment_db": experiment_db
    }, allow_parallel=True)
    loop.run_until_complete(server_control.start(
        bind, args.port_control))
    atexit_register_coroutine(server_control.stop)

    server_notify = Publisher({
        "schedule": scheduler.notifier,
        "devices": device_db.data,
        "datasets": dataset_db.data,
        "explist": experiment_db.explist,
        "explist_status": experiment_db.status
    })
    loop.run_until_complete(server_notify.start(
        bind, args.port_notify))
    atexit_register_coroutine(server_notify.stop)

    server_logging = LoggingServer()
    loop.run_until_complete(server_logging.start(
        bind, args.port_logging))
    atexit_register_coroutine(server_logging.stop)

    logger.info("running, bound to %s", bind)
    loop.run_forever()
Example #17
def main():
    args = get_argparser().parse_args()
    log_forwarder = init_log(args)
    if os.name == "nt":
        loop = asyncio.ProactorEventLoop()
        asyncio.set_event_loop(loop)
    else:
        loop = asyncio.get_event_loop()
    atexit.register(loop.close)
    bind = bind_address_from_args(args)

    server_broadcast = Broadcaster()
    loop.run_until_complete(server_broadcast.start(
        bind, args.port_broadcast))
    atexit_register_coroutine(server_broadcast.stop)

    log_forwarder.callback = (lambda msg:
        server_broadcast.broadcast("log", msg))
    def ccb_issue(service, *args, **kwargs):
        msg = {
            "service": service,
            "args": args,
            "kwargs": kwargs
        }
        server_broadcast.broadcast("ccb", msg)

    device_db = DeviceDB(args.device_db)
    dataset_db = DatasetDB(args.dataset_db)
    dataset_db.start()
    atexit_register_coroutine(dataset_db.stop)
    worker_handlers = dict()

    if args.git:
        repo_backend = GitBackend(args.repository)
    else:
        repo_backend = FilesystemBackend(args.repository)
    experiment_db = ExperimentDB(repo_backend, worker_handlers)
    atexit.register(experiment_db.close)

    scheduler = Scheduler(RIDCounter(), worker_handlers, experiment_db)
    scheduler.start()
    atexit_register_coroutine(scheduler.stop)

    worker_handlers.update({
        "get_device_db": device_db.get_device_db,
        "get_device": device_db.get,
        "get_dataset": dataset_db.get,
        "update_dataset": dataset_db.update,
        "scheduler_submit": scheduler.submit,
        "scheduler_delete": scheduler.delete,
        "scheduler_request_termination": scheduler.request_termination,
        "scheduler_get_status": scheduler.get_status,
        "scheduler_check_pause": scheduler.check_pause,
        "ccb_issue": ccb_issue,
    })
    experiment_db.scan_repository_async()

    server_control = RPCServer({
        "master_device_db": device_db,
        "master_dataset_db": dataset_db,
        "master_schedule": scheduler,
        "master_experiment_db": experiment_db
    }, allow_parallel=True)
    loop.run_until_complete(server_control.start(
        bind, args.port_control))
    atexit_register_coroutine(server_control.stop)

    server_notify = Publisher({
        "schedule": scheduler.notifier,
        "devices": device_db.data,
        "datasets": dataset_db.data,
        "explist": experiment_db.explist,
        "explist_status": experiment_db.status
    })
    loop.run_until_complete(server_notify.start(
        bind, args.port_notify))
    atexit_register_coroutine(server_notify.stop)

    server_logging = LoggingServer()
    loop.run_until_complete(server_logging.start(
        bind, args.port_logging))
    atexit_register_coroutine(server_logging.stop)

    print("ARTIQ master is now ready.")
    loop.run_forever()
Example #18
def main():
    # initialize application
    args = get_argparser().parse_args()
    widget_log_handler = log.init_log(args, "dashboard")

    if args.db_file is None:
        args.db_file = os.path.join(
            get_user_config_dir(),
            "artiq_dashboard_{server}_{port}.pyon".format(
                server=args.server.replace(":", "."), port=args.port_notify))

    app = QtWidgets.QApplication(["ARTIQ Dashboard"])
    loop = QEventLoop(app)
    asyncio.set_event_loop(loop)
    atexit.register(loop.close)
    smgr = state.StateManager(args.db_file)

    # create connections to master
    rpc_clients = dict()
    for target in "schedule", "experiment_db", "dataset_db":
        client = AsyncioClient()
        loop.run_until_complete(client.connect_rpc(
            args.server, args.port_control, "master_" + target))
        atexit.register(client.close_rpc)
        rpc_clients[target] = client

    config = Client(args.server, args.port_control, "master_config")
    try:
        server_name = config.get_name()
    finally:
        config.close_rpc()

    disconnect_reported = False
    def report_disconnect():
        nonlocal disconnect_reported
        if not disconnect_reported:
            logging.error("connection to master lost, "
                          "restart dashboard to reconnect")
        disconnect_reported = True

    sub_clients = dict()
    for notifier_name, modelf in (("explist", explorer.Model),
                                  ("explist_status", explorer.StatusUpdater),
                                  ("datasets", datasets.Model),
                                  ("schedule", schedule.Model)):
        subscriber = ModelSubscriber(notifier_name, modelf,
            report_disconnect)
        loop.run_until_complete(subscriber.connect(
            args.server, args.port_notify))
        atexit_register_coroutine(subscriber.close)
        sub_clients[notifier_name] = subscriber

    broadcast_clients = dict()
    for target in "log", "ccb":
        client = Receiver(target, [], report_disconnect)
        loop.run_until_complete(client.connect(
            args.server, args.port_broadcast))
        atexit_register_coroutine(client.close)
        broadcast_clients[target] = client

    # initialize main window
    main_window = MainWindow(args.server if server_name is None else server_name)
    smgr.register(main_window)
    mdi_area = MdiArea()
    mdi_area.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    mdi_area.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    main_window.setCentralWidget(mdi_area)

    # create UI components
    expmgr = experiments.ExperimentManager(main_window,
                                           sub_clients["explist"],
                                           sub_clients["schedule"],
                                           rpc_clients["schedule"],
                                           rpc_clients["experiment_db"])
    smgr.register(expmgr)
    d_shortcuts = shortcuts.ShortcutsDock(main_window, expmgr)
    smgr.register(d_shortcuts)
    d_explorer = explorer.ExplorerDock(expmgr, d_shortcuts,
                                       sub_clients["explist"],
                                       sub_clients["explist_status"],
                                       rpc_clients["schedule"],
                                       rpc_clients["experiment_db"])
    smgr.register(d_explorer)

    d_datasets = datasets.DatasetsDock(sub_clients["datasets"],
                                       rpc_clients["dataset_db"])
    smgr.register(d_datasets)

    d_applets = applets_ccb.AppletsCCBDock(main_window, sub_clients["datasets"])
    atexit_register_coroutine(d_applets.stop)
    smgr.register(d_applets)
    broadcast_clients["ccb"].notify_cbs.append(d_applets.ccb_notify)

    d_ttl_dds = moninj.MonInj()
    loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify))
    atexit_register_coroutine(d_ttl_dds.stop)

    d_schedule = schedule.ScheduleDock(
        rpc_clients["schedule"], sub_clients["schedule"])
    smgr.register(d_schedule)

    logmgr = log.LogDockManager(main_window)
    smgr.register(logmgr)
    broadcast_clients["log"].notify_cbs.append(logmgr.append_message)
    widget_log_handler.callback = logmgr.append_message

    # lay out docks
    right_docks = [
        d_explorer, d_shortcuts,
        d_ttl_dds.ttl_dock, d_ttl_dds.dds_dock, d_ttl_dds.dac_dock,
        d_datasets, d_applets
    ]
    main_window.addDockWidget(QtCore.Qt.RightDockWidgetArea, right_docks[0])
    for d1, d2 in zip(right_docks, right_docks[1:]):
        main_window.tabifyDockWidget(d1, d2)
    main_window.addDockWidget(QtCore.Qt.BottomDockWidgetArea, d_schedule)

    # load/initialize state
    if os.name == "nt":
        # HACK: show the main window before creating applets.
        # Otherwise, the windows of those applets that are in detached
        # QDockWidgets fail to be embedded.
        main_window.show()
    smgr.load()
    smgr.start()
    atexit_register_coroutine(smgr.stop)

    # create first log dock if not already in state
    d_log0 = logmgr.first_log_dock()
    if d_log0 is not None:
        main_window.tabifyDockWidget(d_schedule, d_log0)


    if server_name is not None:
        server_description = server_name + " ({})".format(args.server)
    else:
        server_description = args.server
    logging.info("ARTIQ dashboard %s connected to %s",
                 artiq_version, server_description)

    # run
    main_window.show()
    loop.run_until_complete(main_window.exit_request.wait())
Example #19
    def start(self):
        """ Connect to the WaND servers """
        def init_cb(db, mod):
            db.update(mod)
            return db

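        # reconnect loop: retry the subscriber connection every 10 s until exit is requested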
        async def subscriber_reconnect(self, server, db):
            logger.info("No connection to server '{}'".format(server))

            for _, display in self.laser_displays.items():
                if display.server == server:
                    display.server = ""
                    display.wake_loop.set()

            server_cfg = self.config["servers"][server]
            subscriber = self.subscribers[server][db]["subscriber"]

            if self.win.exit_request.is_set():
                return

            def make_fut(self, server, db):
                fut = asyncio.ensure_future(
                    subscriber_reconnect(self, server, db))
                self.subscribers[server][db]["connected"] = False
                self.subscribers[server][db]["future"] = fut

            subscriber.disconnect_cb = functools.partial(
                make_fut, self, server, db)

            while not self.win.exit_request.is_set():
                try:
                    await subscriber.connect(server_cfg["host"],
                                             server_cfg["notify"])

                    logger.info("Reconnected to server '{}'".format(server))
                    break
                except OSError:
                    logger.info("could not connect to '%s', retry in 10 s...",
                                server)
                    await asyncio.sleep(10)

        for server, server_cfg in self.config["servers"].items():
            self.subscribers[server] = {}

            # ask the servers to keep us updated with changes to laser settings
            # (exposures, references, etc)
            subscriber = Subscriber(
                "laser_db", functools.partial(init_cb, self.laser_db),
                functools.partial(self.notifier_cb, "laser_db", server))
            fut = asyncio.ensure_future(
                subscriber_reconnect(self, server, "laser_db"))
            self.subscribers[server]["laser_db"] = {
                "subscriber": subscriber,
                "connected": False,
                "future": fut
            }

            # ask the servers to keep us updated with the latest frequency data
            subscriber = Subscriber(
                "freq_db", functools.partial(init_cb, self.freq_db),
                functools.partial(self.notifier_cb, "freq_db", server))
            fut = asyncio.ensure_future(
                subscriber_reconnect(self, server, "freq_db"))
            self.subscribers[server]["freq_db"] = {
                "subscriber": subscriber,
                "connected": False,
                "future": fut
            }

            # ask the servers to keep us updated with the latest osa traces
            subscriber = Subscriber(
                "osa_db", functools.partial(init_cb, self.osa_db),
                functools.partial(self.notifier_cb, "osa_db", server))
            fut = asyncio.ensure_future(
                subscriber_reconnect(self, server, "osa_db"))
            self.subscribers[server]["osa_db"] = {
                "subscriber": subscriber,
                "connected": False,
                "future": fut
            }

        atexit_register_coroutine(self.shutdown)

        self.win.showMaximized()
        atexit.register(self.win.exit_request.set)
        self.loop.run_until_complete(self.win.exit_request.wait())