Example #1
0
def _build_experiment(device_mgr, dataset_mgr, args):
    """Instantiate the experiment (or precompiled-kernel runner) selected
    by the command-line *args*.

    Precompiled kernels (.elf/.ll/.bc) are dispatched to their dedicated
    runner classes; any other file is imported as a Python module and the
    experiment class is looked up inside it. Without a file argument, the
    experiment is searched for in ``__main__``.
    """
    if hasattr(args, "file"):
        # Map precompiled-kernel suffixes to their runner classes.
        runner_cls = None
        for suffix, cls in ((".elf", ELFRunner),
                            (".ll", LLVMIRRunner),
                            (".bc", LLVMBitcodeRunner)):
            if args.file.endswith(suffix):
                runner_cls = cls
                break
        if runner_cls is not None:
            # Precompiled kernels carry no Python-side experiment class,
            # so neither arguments nor an experiment name apply.
            if args.arguments:
                raise ValueError("arguments not supported for precompiled kernels")
            if args.experiment:
                raise ValueError("experiment-by-name not supported "
                                 "for precompiled kernels")
            return runner_cls(device_mgr, dataset_mgr, file=args.file)
        import_cache.install_hook()
        module = file_import(args.file, prefix="artiq_run_")
        file = args.file
    else:
        # No file given: look in the interactively-run main module.
        module = sys.modules["__main__"]
        file = getattr(module, "__file__")
    exp = get_experiment(module, args.experiment)
    arguments = parse_arguments(args.arguments)
    expid = {
        "file": file,
        "experiment": args.experiment,
        "arguments": arguments
    }
    # Expose the expid through the virtual scheduler device.
    device_mgr.virtual_devices["scheduler"].expid = expid
    argument_mgr = ProcessArgumentManager(arguments)
    return exp((device_mgr, dataset_mgr, argument_mgr))
Example #2
0
def _build_experiment(device_mgr, dataset_mgr, args):
    """Instantiate the experiment (or precompiled-kernel runner) selected
    by the command-line *args*.

    The three managers are bundled into a single tuple that is handed to
    either the precompiled-kernel runner or the experiment constructor.
    """
    arguments = parse_arguments(args.arguments)
    argument_mgr = ProcessArgumentManager(arguments)
    managers = (device_mgr, dataset_mgr, argument_mgr)
    if hasattr(args, "file"):
        # Precompiled-kernel suffix -> runner class dispatch.
        runner_cls = None
        for suffix, cls in ((".elf", ELFRunner),
                            (".ll", LLVMIRRunner),
                            (".bc", LLVMBitcodeRunner)):
            if args.file.endswith(suffix):
                runner_cls = cls
                break
        if runner_cls is not None:
            # Precompiled kernels carry no Python-side experiment class,
            # so neither arguments nor an experiment name apply.
            if args.arguments:
                raise ValueError(
                    "arguments not supported for precompiled kernels")
            if args.experiment:
                raise ValueError("experiment-by-name not supported "
                                 "for precompiled kernels")
            return runner_cls(managers, file=args.file)
        import_cache.install_hook()
        module = file_import(args.file, prefix="artiq_run_")
        file = args.file
    else:
        # No file given: look in the interactively-run main module.
        module = sys.modules["__main__"]
        file = getattr(module, "__file__")
    expid = {
        "file": file,
        "experiment": args.experiment,
        "arguments": arguments
    }
    # Expose the expid through the virtual scheduler device.
    device_mgr.virtual_devices["scheduler"].expid = expid
    return get_experiment(module, args.experiment)(managers)
Example #3
0
def main():
    """Worker process entry point.

    Connects back to the parent over the IPC pipe given in argv[1] and
    services experiment lifecycle commands (build / prepare / run /
    analyze / write_results / examine) in a loop until "terminate".
    """
    global ipc

    # argv[2] carries the log level, argv[1] the parent's IPC address.
    multiline_log_config(level=int(sys.argv[2]))
    ipc = pipe_ipc.ChildComm(sys.argv[1])

    # Per-run state, populated by the "build" action and consumed by
    # the later actions.
    start_time = None
    run_time = None
    rid = None
    expid = None
    exp = None
    exp_inst = None
    repository_path = None

    device_mgr = DeviceManager(ParentDeviceDB,
                               virtual_devices={
                                   "scheduler": Scheduler(),
                                   "ccb": CCB()
                               })
    dataset_mgr = DatasetManager(ParentDatasetDB)

    import_cache.install_hook()

    try:
        while True:
            # Blocking read of the next command object from the parent.
            obj = get_object()
            action = obj["action"]
            if action == "build":
                start_time = time.time()
                rid = obj["rid"]
                expid = obj["expid"]
                if obj["wd"] is not None:
                    # Using repository
                    experiment_file = os.path.join(obj["wd"], expid["file"])
                    repository_path = obj["wd"]
                else:
                    experiment_file = expid["file"]
                    repository_path = None
                setup_diagnostics(experiment_file, repository_path)
                exp = get_exp(experiment_file, expid["class_name"])
                device_mgr.virtual_devices["scheduler"].set_run_info(
                    rid, obj["pipeline_name"], expid, obj["priority"])
                start_local_time = time.localtime(start_time)
                # Results go under results/<date>/<hour>; chdir so the
                # experiment and write_results operate in that directory.
                dirname = os.path.join(
                    "results", time.strftime("%Y-%m-%d", start_local_time),
                    time.strftime("%H", start_local_time))
                os.makedirs(dirname, exist_ok=True)
                os.chdir(dirname)
                argument_mgr = ProcessArgumentManager(expid["arguments"])
                exp_inst = exp((device_mgr, dataset_mgr, argument_mgr, {}))
                put_object({"action": "completed"})
            elif action == "prepare":
                exp_inst.prepare()
                put_object({"action": "completed"})
            elif action == "run":
                run_time = time.time()
                exp_inst.run()
                put_object({"action": "completed"})
            elif action == "analyze":
                try:
                    exp_inst.analyze()
                except:
                    # make analyze failure non-fatal, as we may still want to
                    # write results afterwards
                    put_exception_report()
                else:
                    put_object({"action": "completed"})
            elif action == "write_results":
                # HDF5 file named <rid>-<ExperimentClass>.h5 in the cwd
                # set up by "build".
                filename = "{:09}-{}.h5".format(rid, exp.__name__)
                with h5py.File(filename, "w") as f:
                    dataset_mgr.write_hdf5(f)
                    f["artiq_version"] = artiq_version
                    f["rid"] = rid
                    f["start_time"] = start_time
                    f["run_time"] = run_time
                    f["expid"] = pyon.encode(expid)
                put_object({"action": "completed"})
            elif action == "examine":
                examine(ExamineDeviceMgr, ExamineDatasetMgr, obj["file"])
                put_object({"action": "completed"})
            elif action == "terminate":
                break
    except:
        # Any failure is reported back to the parent rather than letting
        # the worker die silently.
        put_exception_report()
    finally:
        device_mgr.close_devices()
        ipc.close()
Example #4
0
def main():
    """Worker process entry point.

    Connects back to the parent over the IPC pipe given in argv[1] and
    services experiment lifecycle commands (build / prepare / run /
    analyze / write_results / examine) in a loop until "terminate".
    """
    global ipc

    # argv[2] carries the log level, argv[1] the parent's IPC address.
    multiline_log_config(level=int(sys.argv[2]))
    ipc = pipe_ipc.ChildComm(sys.argv[1])

    # Per-run state, populated by the "build" action and consumed by
    # the later actions.
    start_time = None
    run_time = None
    rid = None
    expid = None
    exp = None
    exp_inst = None
    repository_path = None

    device_mgr = DeviceManager(ParentDeviceDB,
                               virtual_devices={"scheduler": Scheduler(),
                                                "ccb": CCB()})
    dataset_mgr = DatasetManager(ParentDatasetDB)

    import_cache.install_hook()

    try:
        while True:
            # Blocking read of the next command object from the parent.
            obj = get_object()
            action = obj["action"]
            if action == "build":
                start_time = time.time()
                rid = obj["rid"]
                expid = obj["expid"]
                if obj["wd"] is not None:
                    # Using repository
                    experiment_file = os.path.join(obj["wd"], expid["file"])
                    repository_path = obj["wd"]
                else:
                    experiment_file = expid["file"]
                    repository_path = None
                setup_diagnostics(experiment_file, repository_path)
                exp = get_exp(experiment_file, expid["class_name"])
                device_mgr.virtual_devices["scheduler"].set_run_info(
                    rid, obj["pipeline_name"], expid, obj["priority"])
                start_local_time = time.localtime(start_time)
                # Results go under results/<date>/<hour>; chdir so the
                # experiment and write_results operate in that directory.
                dirname = os.path.join("results",
                                   time.strftime("%Y-%m-%d", start_local_time),
                                   time.strftime("%H", start_local_time))
                os.makedirs(dirname, exist_ok=True)
                os.chdir(dirname)
                argument_mgr = ProcessArgumentManager(expid["arguments"])
                exp_inst = exp((device_mgr, dataset_mgr, argument_mgr))
                put_object({"action": "completed"})
            elif action == "prepare":
                exp_inst.prepare()
                put_object({"action": "completed"})
            elif action == "run":
                run_time = time.time()
                exp_inst.run()
                put_object({"action": "completed"})
            elif action == "analyze":
                try:
                    exp_inst.analyze()
                except:
                    # make analyze failure non-fatal, as we may still want to
                    # write results afterwards
                    put_exception_report()
                else:
                    put_object({"action": "completed"})
            elif action == "write_results":
                # HDF5 file named <rid>-<ExperimentClass>.h5 in the cwd
                # set up by "build".
                filename = "{:09}-{}.h5".format(rid, exp.__name__)
                with h5py.File(filename, "w") as f:
                    dataset_mgr.write_hdf5(f)
                    f["artiq_version"] = artiq_version
                    f["rid"] = rid
                    f["start_time"] = start_time
                    f["run_time"] = run_time
                    f["expid"] = pyon.encode(expid)
                put_object({"action": "completed"})
            elif action == "examine":
                examine(ExamineDeviceMgr, ExamineDatasetMgr, obj["file"])
                put_object({"action": "completed"})
            elif action == "terminate":
                break
    except:
        # Any failure is reported back to the parent rather than letting
        # the worker die silently.
        put_exception_report()
    finally:
        device_mgr.close_devices()
        ipc.close()
Example #5
0
def main():
    """Worker process entry point.

    Connects back to the parent over the IPC pipe given in argv[1] and
    services experiment lifecycle commands (build / prepare / run /
    analyze / write_results / examine) in a loop until "terminate".
    """
    global ipc

    # argv[2] carries the log level, argv[1] the parent's IPC address.
    multiline_log_config(level=int(sys.argv[2]))
    ipc = pipe_ipc.ChildComm(sys.argv[1])

    # Per-run state, populated by the "build" action and consumed by
    # the later actions.  start_time here is a struct_time (localtime),
    # not a float timestamp.
    start_time = None
    rid = None
    expid = None
    exp = None
    exp_inst = None
    repository_path = None

    device_mgr = DeviceManager(ParentDeviceDB,
                               virtual_devices={
                                   "scheduler": Scheduler(),
                                   "ccb": CCB()
                               })
    dataset_mgr = DatasetManager(ParentDatasetDB)

    import_cache.install_hook()

    try:
        while True:
            # Blocking read of the next command object from the parent.
            obj = get_object()
            action = obj["action"]
            if action == "build":
                start_time = time.localtime()
                rid = obj["rid"]
                expid = obj["expid"]
                if obj["wd"] is not None:
                    # Using repository
                    experiment_file = os.path.join(obj["wd"], expid["file"])
                    repository_path = obj["wd"]
                else:
                    experiment_file = expid["file"]
                    repository_path = None
                setup_diagnostics(experiment_file, repository_path)
                exp = get_exp(experiment_file, expid["class_name"])
                device_mgr.virtual_devices["scheduler"].set_run_info(
                    rid, obj["pipeline_name"], expid, obj["priority"])
                # Results go under results/<date>/<hour>; chdir so the
                # experiment and write_results operate in that directory.
                dirname = os.path.join("results",
                                       time.strftime("%Y-%m-%d", start_time),
                                       time.strftime("%H", start_time))
                os.makedirs(dirname, exist_ok=True)
                os.chdir(dirname)
                argument_mgr = ProcessArgumentManager(expid["arguments"])
                exp_inst = exp((device_mgr, dataset_mgr, argument_mgr))
                put_object({"action": "completed"})
            elif action == "prepare":
                exp_inst.prepare()
                put_object({"action": "completed"})
            elif action == "run":
                exp_inst.run()
                put_object({"action": "completed"})
            elif action == "analyze":
                exp_inst.analyze()
                put_object({"action": "completed"})
            elif action == "write_results":
                # HDF5 file named <rid>-<ExperimentClass>.h5 in the cwd
                # set up by "build".
                filename = "{:09}-{}.h5".format(rid, exp.__name__)
                with h5py.File(filename, "w") as f:
                    dataset_mgr.write_hdf5(f)
                    f["artiq_version"] = artiq_version
                    f["rid"] = rid
                    # Convert the localtime struct back to a UNIX timestamp.
                    f["start_time"] = int(time.mktime(start_time))
                    f["expid"] = pyon.encode(expid)
                put_object({"action": "completed"})
            elif action == "examine":
                examine(ExamineDeviceMgr, ParentDatasetDB, obj["file"])
                put_object({"action": "completed"})
            elif action == "terminate":
                break
    except Exception as exc:
        # When we get CompileError, a more suitable diagnostic has already
        # been printed.
        if not isinstance(exc, CompileError):
            # Build a compact one-line summary, then append any kernel
            # (artiq_core_exception) or remote (parent_traceback) detail.
            short_exc_info = type(exc).__name__
            exc_str = str(exc)
            if exc_str:
                short_exc_info += ": " + exc_str.splitlines()[0]
            lines = ["Terminating with exception (" + short_exc_info + ")\n"]
            if hasattr(exc, "artiq_core_exception"):
                lines.append(str(exc.artiq_core_exception))
            if hasattr(exc, "parent_traceback"):
                lines += exc.parent_traceback
                lines += traceback.format_exception_only(type(exc), exc)
            logging.error("".join(lines).rstrip(),
                          exc_info=not hasattr(exc, "parent_traceback"))
        # Always tell the parent the run failed, even for CompileError.
        put_object({"action": "exception"})
    finally:
        device_mgr.close_devices()
        ipc.close()
Example #6
0
def main():
    """Worker process entry point.

    Connects back to the parent over the IPC pipe given in argv[1] and
    services experiment lifecycle commands (build / prepare / run /
    analyze / write_results / examine) in a loop until "terminate".
    This variant archives results under ~/data/<date>/.
    """
    global ipc

    # argv[2] carries the log level, argv[1] the parent's IPC address.
    multiline_log_config(level=int(sys.argv[2]))
    ipc = pipe_ipc.ChildComm(sys.argv[1])

    # Per-run state, populated by the "build" action and consumed by
    # the later actions.
    start_time = None
    run_time = None
    rid = None
    expid = None
    exp = None
    exp_inst = None
    repository_path = None

    def write_results():
        # Emergency dump of the standard <rid>-<ExperimentClass>.h5 file;
        # used when run() fails so partial results are not lost.
        filename = "{:09}-{}.h5".format(rid, exp.__name__)
        with h5py.File(filename, "w") as f:
            dataset_mgr.write_hdf5(f)
            f["artiq_version"] = artiq_version
            f["rid"] = rid
            f["start_time"] = start_time
            f["run_time"] = run_time
            f["expid"] = pyon.encode(expid)

    device_mgr = DeviceManager(ParentDeviceDB,
                               virtual_devices={
                                   "scheduler": Scheduler(),
                                   "ccb": CCB()
                               })
    dataset_mgr = DatasetManager(ParentDatasetDB)

    import_cache.install_hook()

    try:
        while True:
            # Blocking read of the next command object from the parent.
            obj = get_object()
            action = obj["action"]
            if action == "build":
                start_time = time.time()
                rid = obj["rid"]
                expid = obj["expid"]
                if obj["wd"] is not None:
                    # Using repository
                    experiment_file = os.path.join(obj["wd"], expid["file"])
                    repository_path = obj["wd"]
                else:
                    experiment_file = expid["file"]
                    repository_path = None
                setup_diagnostics(experiment_file, repository_path)
                exp = get_experiment(experiment_file, expid["class_name"])
                device_mgr.virtual_devices["scheduler"].set_run_info(
                    rid, obj["pipeline_name"], expid, obj["priority"])
                start_local_time = time.localtime(start_time)
                # Archive under ~/data/<date>/; chdir so the experiment
                # and the write-results actions operate in that directory.
                rootdir = os.path.join(os.path.expanduser("~"), "data")
                dirname = os.path.join(
                    rootdir, time.strftime("%Y-%m-%d", start_local_time))
                os.makedirs(dirname, exist_ok=True)
                os.chdir(dirname)
                argument_mgr = ProcessArgumentManager(expid["arguments"])
                exp_inst = exp((device_mgr, dataset_mgr, argument_mgr, {}))
                put_completed()
            elif action == "prepare":
                exp_inst.prepare()
                put_completed()
            elif action == "run":
                # current_time is kept for the fallback filename used by
                # the "write_results" action below.
                current_time = datetime.datetime.now().strftime("%H%M_%S")
                run_time = time.time()
                try:
                    exp_inst.run()
                except:
                    # Only write results in run() on failure; on success wait
                    # for end of analyze stage.
                    write_results()
                    raise
                put_completed()
            elif action == "analyze":
                try:
                    exp_inst.analyze()
                except:
                    # make analyze failure non-fatal, as we may still want to
                    # write results afterwards
                    put_exception_report()
                else:
                    put_object({"action": "completed"})
            elif action == "write_results":
                # An experiment may opt out of archiving via its `archive`
                # attribute.
                if hasattr(exp_inst, "archive"):
                    if not exp_inst.archive:
                        put_object({"action": "completed"})
                        continue
                # NOTE(review): dirname/current_time are only bound by the
                # "build"/"run" actions — presumably the parent guarantees
                # this ordering; verify against the scheduler protocol.
                path = os.path.join(dirname, exp.__name__)
                if not os.path.exists(path):
                    os.mkdir(path)
                if hasattr(exp_inst, "filename"):
                    filename = list(exp_inst.filename.values())[0]
                else:
                    filename = "raw-data_{}.h5".format(current_time)
                # Append mode: the experiment may already have written to
                # this file itself.
                file_ = os.path.join(path, filename)
                with h5py.File(file_, "a") as f:
                    dataset_mgr.write_hdf5(f)
                    f["artiq_version"] = artiq_version
                    f["rid"] = rid
                    f["start_time"] = start_time
                    f["run_time"] = run_time
                    f["expid"] = pyon.encode(expid)
                put_object({"action": "completed"})
            elif action == "examine":
                examine(ExamineDeviceMgr, ExamineDatasetMgr, obj["file"])
                put_completed()
            elif action == "terminate":
                break
    except:
        # Any failure is reported back to the parent rather than letting
        # the worker die silently.
        put_exception_report()
    finally:
        device_mgr.close_devices()
        ipc.close()
Example #7
0
def main():
    """Worker process entry point.

    Connects back to the parent over the IPC pipe given in argv[1] and
    services experiment lifecycle commands (build / prepare / run /
    analyze / write_results / examine) in a loop until "terminate".
    """
    global ipc

    # argv[2] carries the log level, argv[1] the parent's IPC address.
    multiline_log_config(level=int(sys.argv[2]))
    ipc = pipe_ipc.ChildComm(sys.argv[1])

    # Per-run state, populated by the "build" action and consumed by
    # the later actions.  start_time here is a struct_time (localtime),
    # not a float timestamp.
    start_time = None
    rid = None
    expid = None
    exp = None
    exp_inst = None
    repository_path = None

    device_mgr = DeviceManager(ParentDeviceDB,
                               virtual_devices={"scheduler": Scheduler()})
    dataset_mgr = DatasetManager(ParentDatasetDB)

    import_cache.install_hook()

    try:
        while True:
            # Blocking read of the next command object from the parent.
            obj = get_object()
            action = obj["action"]
            if action == "build":
                start_time = time.localtime()
                rid = obj["rid"]
                expid = obj["expid"]
                if obj["wd"] is not None:
                    # Using repository
                    experiment_file = os.path.join(obj["wd"], expid["file"])
                    repository_path = obj["wd"]
                else:
                    experiment_file = expid["file"]
                    repository_path = None
                setup_diagnostics(experiment_file, repository_path)
                exp = get_exp(experiment_file, expid["class_name"])
                device_mgr.virtual_devices["scheduler"].set_run_info(
                    rid, obj["pipeline_name"], expid, obj["priority"])
                # Results go under results/<date>/<hour>; chdir so the
                # experiment and write_results operate in that directory.
                dirname = os.path.join("results",
                                       time.strftime("%Y-%m-%d", start_time),
                                       time.strftime("%H", start_time))
                os.makedirs(dirname, exist_ok=True)
                os.chdir(dirname)
                argument_mgr = ProcessArgumentManager(expid["arguments"])
                exp_inst = exp((device_mgr, dataset_mgr, argument_mgr))
                put_object({"action": "completed"})
            elif action == "prepare":
                exp_inst.prepare()
                put_object({"action": "completed"})
            elif action == "run":
                exp_inst.run()
                put_object({"action": "completed"})
            elif action == "analyze":
                exp_inst.analyze()
                put_object({"action": "completed"})
            elif action == "write_results":
                # HDF5 file named <rid>-<ExperimentClass>.h5 in the cwd
                # set up by "build"; datasets go into a dedicated group.
                filename = "{:09}-{}.h5".format(rid, exp.__name__)
                with h5py.File(filename, "w") as f:
                    dataset_mgr.write_hdf5(f.create_group("datasets"))
                    f["artiq_version"] = artiq_version
                    f["rid"] = rid
                    # Convert the localtime struct back to a UNIX timestamp.
                    f["start_time"] = int(time.mktime(start_time))
                    f["expid"] = pyon.encode(expid)
                put_object({"action": "completed"})
            elif action == "examine":
                examine(ExamineDeviceMgr, ParentDatasetDB, obj["file"])
                put_object({"action": "completed"})
            elif action == "terminate":
                break
    except Exception as exc:
        # When we get CompileError, a more suitable diagnostic has already
        # been printed.
        if not isinstance(exc, CompileError):
            # Build a compact one-line summary, then append any kernel
            # (artiq_core_exception) or remote (parent_traceback) detail.
            short_exc_info = type(exc).__name__
            exc_str = str(exc)
            if exc_str:
                short_exc_info += ": " + exc_str.splitlines()[0]
            lines = ["Terminating with exception ("+short_exc_info+")\n"]
            if hasattr(exc, "artiq_core_exception"):
                lines.append(str(exc.artiq_core_exception))
            if hasattr(exc, "parent_traceback"):
                lines += exc.parent_traceback
                lines += traceback.format_exception_only(type(exc), exc)
            logging.error("".join(lines).rstrip(),
                          exc_info=not hasattr(exc, "parent_traceback"))
        # Always tell the parent the run failed, even for CompileError.
        put_object({"action": "exception"})
    finally:
        device_mgr.close_devices()
        ipc.close()