def test_shared(self):
    """A value defined at the shared (top) level is visible via get_conf."""
    cfg_path = '%(cp)s/test_confighelper.json' % {'cp': current_path}
    helper = ConfigHelper(cfg_path, TEST_HOST_NAME)
    self.assertEqual(helper.get_conf('test'), True)
def test_host_number(self):
    """A host-level integer option is returned via get_host_conf."""
    cfg_path = '%(cp)s/test_confighelper.json' % {'cp': current_path}
    helper = ConfigHelper(cfg_path, TEST_HOST_NAME)
    self.assertEqual(helper.get_host_conf('number'), 3)
def test_host_dict(self):
    """A host-level dict option is returned intact."""
    config_file = '%(cp)s/test_confighelper.json' % {'cp': current_path}
    ch = ConfigHelper(config_file, TEST_HOST_NAME)
    host_dict = ch.get_host_conf('dict')
    # assertEqual reports a readable diff on mismatch; the previous
    # assertTrue(host_dict == ...) only reported "False is not true".
    self.assertEqual(host_dict, {'name_one': 'value_one'})
def test_host_list(self):
    """A host-level list option is returned in order."""
    cfg_path = '%(cp)s/test_confighelper.json' % {'cp': current_path}
    helper = ConfigHelper(cfg_path, TEST_HOST_NAME)
    self.assertEqual(helper.get_host_conf('list'), [1, 2, 3])
def test_host_string(self):
    """A host-level string option is returned as-is."""
    cfg_path = '%(cp)s/test_confighelper.json' % {'cp': current_path}
    helper = ConfigHelper(cfg_path, TEST_HOST_NAME)
    self.assertEqual(helper.get_host_conf('string'), 'test')
def __init__(self, config: ConfigHelper) -> None:
    """Register this host's service via zeroconf.

    When bound to all interfaces, re-registration is triggered on
    network state changes so advertised addresses stay current.
    """
    self.server = config.get_server()
    self.runner = AsyncRunner(IPVersion.All)
    host_info = self.server.get_host_info()
    bind_addr = host_info["address"]
    addresses: Optional[List[bytes]] = [socket.inet_aton(bind_addr)]
    # "0.0.0.0" means the server listens on every interface
    self.bound_all = bind_addr == "0.0.0.0"
    self.service_info = self._build_service_info(addresses)
    if self.bound_all:
        self.server.register_event_handler(
            "machine:net_state_changed", self._update_service)
def __init__(self, config: ConfigHelper) -> None:
    """Factory for gpio chips/lines backed by the system libgpiod."""
    self.server = config.get_server()
    # libgpiod is loaded from the system installation, not a pip package
    self.gpiod: Any = load_system_module("gpiod")
    GpioEvent.init_constants(self.gpiod)
    self.chips: Dict[str, Any] = {}
    self.reserved_gpios: Dict[str, GpioBase] = {}
    ver_str: str = self.gpiod.version_string()
    self.gpiod_version = tuple(int(part) for part in ver_str.split('.'))
    self.server.add_log_rollover_item(
        "gpiod_version", f"libgpiod version: {ver_str}")
def _get_path_option(self, config: ConfigHelper, option: str) -> str: path: Optional[str] = config.get(option, None) if path is None: return "" expanded = os.path.abspath(os.path.expanduser(path)) if not os.path.exists(expanded): raise self.server.error( f"Invalid path for option '{option}', " f"{path} does not exist") return expanded
def _parse_pin(self, config: ConfigHelper) -> Tuple[int, str, bool]: pin = cfg_pin = config.get("pin") invert = False if pin[0] == "!": pin = pin[1:] invert = True chip_id: str = "gpiochip0" pin_parts = pin.split("/") if len(pin_parts) == 2: chip_id, pin = pin_parts elif len(pin_parts) == 1: pin = pin_parts[0] # Verify pin if not chip_id.startswith("gpiochip") or \ not chip_id[-1].isdigit() or \ not pin.startswith("gpio") or \ not pin[4:].isdigit(): raise config.error(f"Invalid Power Pin configuration: {cfg_pin}") pin_id = int(pin[4:]) return pin_id, chip_id, invert
def __init__(self,
             config: ConfigHelper,
             cmd_helper: CommandHelper,
             name: Optional[str] = None,
             prefix: str = "",
             cfg_hash: Optional[str] = None) -> None:
    """Common updater state: name, log prefix, refresh cadence and the
    config hash used to detect configuration changes between restarts.
    """
    if name is None:
        name = config.get_name().split()[-1]
    self.name = name
    # A non-empty prefix expands to "<prefix> <name>: " for log lines
    self.prefix = f"{prefix} {self.name}: " if prefix else prefix
    self.server = config.get_server()
    self.cmd_helper = cmd_helper
    # A per-updater 'refresh_interval' (hours) overrides the global value
    self.refresh_interval = cmd_helper.get_refresh_interval()
    override = config.getint('refresh_interval', None)
    if override is not None:
        self.refresh_interval = override * 60 * 60
    self.cfg_hash = (config.get_hash().hexdigest()
                     if cfg_hash is None else cfg_hash)
def gpio_config(test_config: ConfigHelper,
                monkeypatch: pytest.MonkeyPatch) -> ConfigHelper:
    """Fixture yielding a config whose gpio backend is a MockGpiod."""
    monkeypatch.setattr(
        gpio, "load_system_module", lambda name: MockGpiod())
    yield test_config
    # Teardown: release any gpio lines the test left reserved
    server = test_config.get_server()
    gpio_comp = server.lookup_component("gpio", None)
    if gpio_comp is not None:
        gpio_comp.close()
        gpio_comp.reserved_gpios = {}
def __init__(self, config: ConfigHelper) -> None:
    """Track print job history and expose it via /server/history.

    Restores persisted aggregate totals from the database, subscribes
    to klippy/job_state events, and registers the history endpoints.
    """
    self.server = config.get_server()
    self.file_manager: FileManager = self.server.lookup_component(
        'file_manager')
    self.request_lock = Lock()
    database: DBComp = self.server.lookup_component("database")
    # .result() blocks on the database future; run during component
    # init, before the server begins handling requests.
    self.job_totals: Dict[str, float] = database.get_item(
        "moonraker", "history.job_totals",
        {
            'total_jobs': 0,
            'total_time': 0.,
            'total_print_time': 0.,
            'total_filament_used': 0.,
            'longest_job': 0.,
            'longest_print': 0.
        }).result()
    # Job lifecycle events drive creation/finalization of history entries
    self.server.register_event_handler(
        "server:klippy_disconnect", self._handle_disconnect)
    self.server.register_event_handler(
        "server:klippy_shutdown", self._handle_shutdown)
    self.server.register_event_handler(
        "job_state:started", self._on_job_started)
    self.server.register_event_handler(
        "job_state:complete", self._on_job_complete)
    self.server.register_event_handler(
        "job_state:cancelled", self._on_job_cancelled)
    self.server.register_event_handler(
        "job_state:standby", self._on_job_standby)
    self.server.register_event_handler(
        "job_state:error", self._on_job_error)
    self.server.register_notification("history:history_changed")
    self.server.register_endpoint(
        "/server/history/job", ['GET', 'DELETE'], self._handle_job_request)
    self.server.register_endpoint(
        "/server/history/list", ['GET'], self._handle_jobs_list)
    self.server.register_endpoint(
        "/server/history/totals", ['GET'], self._handle_job_totals)
    self.server.register_endpoint(
        "/server/history/reset_totals", ['POST'],
        self._handle_job_total_reset)
    database.register_local_namespace(HIST_NAMESPACE)
    self.history_ns = database.wrap_namespace(HIST_NAMESPACE,
                                              parse_keys=False)
    self.current_job: Optional[PrinterJob] = None
    self.current_job_id: Optional[str] = None
    self.next_job_id: int = 0
    # Job ids are hex strings; resume numbering after the most recent
    # stored id (assumes keys() returns ids in insertion order —
    # TODO confirm against the database component).
    self.cached_job_ids = self.history_ns.keys().result()
    if self.cached_job_ids:
        self.next_job_id = int(self.cached_job_ids[-1], 16) + 1
def __init__(self, config: ConfigHelper) -> None:
    """Jinja2 template environments with Moonraker's custom delimiters.

    Statements use ``{% %}``; expressions use single braces ``{ }``.
    A second, async-enabled environment backs async template options.
    """
    self.server = config.get_server()
    secrets: Secrets = self.server.load_component(config, 'secrets')
    # Pass environment options by keyword: positional option arguments
    # are deprecated in Jinja2 3.x and obscure which delimiter is which.
    self.jenv = jinja2.Environment(
        block_start_string='{%', block_end_string='%}',
        variable_start_string='{', variable_end_string='}')
    self.async_env = jinja2.Environment(
        block_start_string='{%', block_end_string='%}',
        variable_start_string='{', variable_end_string='}',
        enable_async=True)
    # Both environments share the same extensions and filters
    for env in (self.jenv, self.async_env):
        env.add_extension("jinja2.ext.do")
        env.filters['fromjson'] = json.loads
    self.add_environment_global('raise_error', self._raise_error)
    self.add_environment_global('secrets', secrets)
def __init__(self, config: ConfigHelper) -> None:
    """Base state for a power device; section name is "<type> <name>"."""
    split_name = config.get_name().split(maxsplit=1)
    if len(split_name) != 2:
        raise config.error(f"Invalid Section Name: {config.get_name()}")
    self.server = config.get_server()
    self.name = split_name[1]
    self.type: str = config.get('type')
    self.state: str = "init"
    self.locked_while_printing = config.getboolean(
        'locked_while_printing', False)
    self.off_when_shutdown = config.getboolean('off_when_shutdown', False)
    self.klipper_restart = config.getboolean(
        'restart_klipper_when_powered', False)
    self.restart_delay = 1.
    if self.klipper_restart:
        # 'restart_delay' is only read when a klipper restart is requested
        self.restart_delay = config.getfloat('restart_delay', 1.)
        if self.restart_delay < .000001:
            raise config.error("Option 'restart_delay' must be above 0.0")
def __init__(self, config: ConfigHelper) -> None:
    """Build the polkit subject used to authorize privileged requests."""
    self.server = config.get_server()
    self.bus: Optional[MessageBus] = None
    self.polkit: Optional[ProxyInterface] = None
    self.warned: bool = False
    # Plain string literal; the previous f-string had no placeholders.
    proc_data = pathlib.Path("/proc/self/stat").read_text()
    # Field 22 (index 21) of /proc/self/stat is the process start time in
    # clock ticks since boot.  NOTE(review): whitespace split assumes the
    # comm field contains no spaces — true for this process, but verify.
    start_clk_ticks = int(proc_data.split()[21])
    # polkit identifies the caller by (pid, start-time) to avoid pid reuse
    self.polkit_subject = [
        "unix-process",
        {
            "pid": dbus_next.Variant("u", os.getpid()),
            "start-time": dbus_next.Variant("t", start_clk_ticks)
        }
    ]
def __init__(self,
             config: ConfigHelper,
             cmd_helper: CommandHelper,
             app_params: Optional[Dict[str, Any]] = None
             ) -> None:
    """Deploy handler for applications hosted on GitHub.

    Validates that 'origin' points at github.com, extracts owner/repo,
    and restores persisted update state from storage.
    """
    super().__init__(config, cmd_helper, app_params)
    self.official_repo: str = "?"
    self.owner: str = "?"
    # Extract repo from origin for validation.  Dots are escaped so only
    # a literal "github.com" host matches (the previous unescaped '.'
    # accepted any character in that position).
    match = re.match(r"https?://(?:www\.)?github\.com/([^/]+/[^.]+)",
                     self.origin)
    if match is not None:
        self.official_repo = match.group(1)
        self.owner = self.official_repo.split('/')[0]
    else:
        raise config.error(
            "Invalid url set for 'origin' option in section "
            f"[{config.get_name()}]. Unable to extract owner/repo.")
    self.host_repo: str = config.get('host_repo', self.official_repo)
    # Restore state persisted by the previous refresh
    storage = self._load_storage()
    self.detected_type: str = storage.get('detected_type', "?")
    self.source_checksum: str = storage.get("source_checksum", "?")
    self.pristine = storage.get('pristine', False)
    self.verified = storage.get('verified', False)
    self.build_date: int = storage.get('build_date', 0)
    self.full_version: str = storage.get('full_version', "?")
    self.short_version: str = storage.get('short_version', "?")
    self.commit_hash: str = storage.get('commit_hash', "?")
    # NOTE(review): attribute 'lastest_hash' is misspelled but preserved,
    # since other code may reference it; the storage key itself is the
    # correctly spelled 'latest_hash'.
    self.lastest_hash: str = storage.get('latest_hash', "?")
    self.latest_version: str = storage.get('latest_version', "?")
    self.latest_checksum: str = storage.get('latest_checksum', "?")
    self.latest_build_date: int = storage.get('latest_build_date', 0)
    self.errors: List[str] = storage.get('errors', [])
    self.commit_log: List[Dict[str, Any]] = storage.get('commit_log', [])
    self.package_list: List[str] = []
    self.python_pkg_list: List[str] = []
    self.release_download_info: Tuple[str, str, int] = ("?", "?", 0)
    self.mutex: asyncio.Lock = asyncio.Lock()
    self.refresh_event: Optional[asyncio.Event] = None
def _load_components(self, config: confighelper.ConfigHelper) -> None:
    """Load core components unconditionally, then optional configured ones."""
    # Section names may include a prefix ("power dev"); key on the first word
    remaining = [sec.split()[0] for sec in config.sections()]
    remaining.remove('server')
    # Core components load whether or not a config section exists
    for core in CORE_COMPONENTS:
        self.load_component(config, core)
        if core in remaining:
            remaining.remove(core)
    # Everything left in the config is an optional component
    for section in remaining:
        self.load_component(config, section, None)
def __init__(self, config: ConfigHelper) -> None:
    """Shared helper for update_manager commands.

    Wraps shell command construction, the HTTP client used for GitHub
    requests, and GitHub API rate-limit bookkeeping.
    """
    self.server = config.get_server()
    # Repo debug relaxes update validation; warn loudly when enabled
    self.debug_enabled = config.getboolean('enable_repo_debug', False)
    if self.debug_enabled:
        logging.warning("UPDATE MANAGER: REPO DEBUG ENABLED")
    shell_cmd: SCMDComp = self.server.lookup_component('shell_command')
    self.scmd_error = shell_cmd.error
    self.build_shell_command = shell_cmd.build_shell_command
    self.pkg_updater: Optional[PackageDeploy] = None
    # Identify Moonraker in outbound HTTP requests
    AsyncHTTPClient.configure(None, defaults=dict(user_agent="Moonraker"))
    self.http_client = AsyncHTTPClient()
    # Cache of prior GitHub responses, keyed by request
    self.github_request_cache: Dict[str, CachedGithubResponse] = {}
    # GitHub API Rate Limit Tracking
    self.gh_rate_limit: Optional[int] = None
    self.gh_limit_remaining: Optional[int] = None
    self.gh_limit_reset_time: Optional[float] = None
    # Update In Progress Tracking
    self.cur_update_app: Optional[str] = None
    self.cur_update_id: Optional[int] = None
    self.full_complete: bool = False
def __init__(self, config: ConfigHelper) -> None:
    """LDAP authentication source.

    DN and password options are templates so values may be pulled from
    the secrets component rather than stored in plain text.
    """
    self.server = config.get_server()
    self.ldap_host = config.get('ldap_host')
    self.ldap_port = config.getint("ldap_port", None)
    self.ldap_secure = config.getboolean("ldap_secure", False)
    base_dn_template = config.gettemplate('base_dn')
    self.base_dn = base_dn_template.render()
    self.group_dn: Optional[str] = None
    group_dn_template = config.gettemplate("group_dn", None)
    if group_dn_template is not None:
        self.group_dn = group_dn_template.render()
    self.active_directory = config.getboolean('is_active_directory',
                                              False)
    self.bind_dn: Optional[str] = None
    self.bind_password: Optional[str] = None
    bind_dn_template = config.gettemplate('bind_dn', None)
    bind_pass_template = config.gettemplate('bind_password', None)
    if bind_dn_template is not None:
        # A bind DN is only usable with a matching password
        self.bind_dn = bind_dn_template.render()
        if bind_pass_template is None:
            raise config.error("Section [ldap]: Option 'bind_password' is "
                               "required when 'bind_dn' is provided")
        self.bind_password = bind_pass_template.render()
    # Serializes LDAP operations
    self.lock = asyncio.Lock()
def __init__(self, config: ConfigHelper) -> None:
    """Load secrets from an optional json or ini file.

    On any failure a warning is registered and the component is left in
    the "invalid" state with an empty value set.
    """
    server = config.get_server()
    self.secrets_file: Optional[pathlib.Path] = None
    self.type = "invalid"
    self.values: Dict[str, Any] = {}
    path: Optional[str] = config.get('secrets_path', None)
    if path is None:
        logging.debug(
            "[secrets]: Option `secrets_path` not supplied")
        return
    self.secrets_file = pathlib.Path(path).expanduser().resolve()
    if not self.secrets_file.is_file():
        server.add_warning(
            "[secrets]: option 'secrets_path', file does not exist: "
            f"'{self.secrets_file}'")
        return
    data = self.secrets_file.read_text()
    # Try json first, then fall back to ini
    vals = self._parse_json(data)
    if vals is not None:
        if not isinstance(vals, dict):
            server.add_warning(
                f"[secrets]: option 'secrets_path', top level item in"
                f" json file '{self.secrets_file}' must be an Object.")
            return
        self.type = "json"
    else:
        vals = self._parse_ini(data)
        if vals is None:
            server.add_warning(
                "[secrets]: option 'secrets_path', invalid file "
                f"format, must be json or ini: '{self.secrets_file}'")
            return
        self.type = "ini"
    self.values = vals
    logging.debug(f"[secrets]: Loaded {self.type} file: "
                  f"{self.secrets_file}")
def __init__(self, config: ConfigHelper) -> None:
    """Instantiate configured [notifier xxx] sections and wire each one
    to the events it subscribes to.  A failed notifier becomes a server
    warning rather than aborting startup.
    """
    self.server = config.get_server()
    self.notifiers: Dict[str, NotifierInstance] = {}
    self.events: Dict[str, NotifierEvent] = {}
    prefix_sections = config.get_prefix_sections("notifier")
    self.register_events(config)
    for section in prefix_sections:
        cfg = config[section]
        try:
            notifier = NotifierInstance(cfg)
            # "*" subscribes the notifier to every known event
            for evt_name, event in self.events.items():
                if evt_name in notifier.events or "*" in notifier.events:
                    event.register_notifier(notifier)
            logging.info(f"Registered notifier: '{notifier.get_name()}'")
        except Exception as e:
            self.server.add_warning(
                f"Failed to load notifier[{cfg.get_name()}]\n{e}")
            continue
        self.notifiers[notifier.get_name()] = notifier
def __init__(self,
             config: ConfigHelper,
             cmd_helper: CommandHelper) -> None:
    """Deploy handler for a prebuilt web client.

    'persistent_files' are preserved across updates; the '.version'
    marker is managed internally and may not be listed.
    """
    super().__init__(config, cmd_helper)
    self.repo = config.get('repo').strip().strip("/")
    self.owner = self.repo.split("/", 1)[0]
    self.path = pathlib.Path(config.get("path")).expanduser().resolve()
    self.persistent_files: List[str] = []
    pfiles = config.get('persistent_files', None)
    if pfiles is not None:
        self.persistent_files = [
            entry.strip().strip("/")
            for entry in pfiles.split("\n")
            if entry.strip()
        ]
        if ".version" in self.persistent_files:
            raise config.error(
                "Invalid value for option 'persistent_files': "
                "'.version' can not be persistent")
    self.version: str = "?"
    self.remote_version: str = "?"
    self.dl_info: Tuple[str, str, int] = ("?", "?", 0)
    self.refresh_evt: Optional[asyncio.Event] = None
    self.mutex: asyncio.Lock = asyncio.Lock()
    logging.info(f"\nInitializing Client Updater: '{self.name}',"
                 f"\npath: {self.path}")
def __init__(self, config: ConfigHelper) -> None:
    """Print history tracker (synchronous database access variant).

    Restores aggregate totals, subscribes to klippy state events and
    registers the /server/history endpoints.
    """
    self.server = config.get_server()
    self.file_manager: FMComp = self.server.lookup_component(
        'file_manager')
    database: DBComp = self.server.lookup_component("database")
    self.gcdb = database.wrap_namespace("gcode_metadata",
                                        parse_keys=False)
    # Persisted aggregate totals across all jobs
    self.job_totals: Dict[str, float] = database.get_item(
        "moonraker", "history.job_totals",
        {
            'total_jobs': 0,
            'total_time': 0.,
            'total_print_time': 0.,
            'total_filament_used': 0.,
            'longest_job': 0.,
            'longest_print': 0.
        })
    self.server.register_event_handler(
        "server:klippy_ready", self._init_ready)
    self.server.register_event_handler(
        "server:status_update", self._status_update)
    self.server.register_event_handler(
        "server:klippy_disconnect", self._handle_disconnect)
    self.server.register_event_handler(
        "server:klippy_shutdown", self._handle_shutdown)
    self.server.register_notification("history:history_changed")
    self.server.register_endpoint(
        "/server/history/job", ['GET', 'DELETE'], self._handle_job_request)
    self.server.register_endpoint(
        "/server/history/list", ['GET'], self._handle_jobs_list)
    self.server.register_endpoint(
        "/server/history/totals", ['GET'], self._handle_job_totals)
    database.register_local_namespace(HIST_NAMESPACE)
    self.history_ns = database.wrap_namespace(HIST_NAMESPACE,
                                              parse_keys=False)
    self.current_job: Optional[PrinterJob] = None
    self.current_job_id: Optional[str] = None
    self.print_stats: Dict[str, Any] = {}
    self.next_job_id: int = 0
    # Job ids are hex strings; resume numbering after the most recent
    # stored id (assumes keys() preserves insertion order — TODO confirm).
    self.cached_job_ids = self.history_ns.keys()
    if self.cached_job_ids:
        self.next_job_id = int(self.cached_job_ids[-1], 16) + 1
def configure_line(self,
                   config: ConfigHelper,
                   chip_factory: GpioChipFactory) -> None:
    """Reserve and configure this device's gpio line as an output.

    Raises a config error when the line cannot be requested, e.g. if it
    is already held by another process or exported via sysfs.
    """
    pin, chip_id, invert = self._parse_pin(config)
    try:
        chip = chip_factory.get_gpio_chip(chip_id)
        self.line = chip.get_line(pin)
        if invert:
            # Active-low: logical 1 drives the physical line low
            self.line.request(consumer="moonraker",
                              type=gpiod.LINE_REQ_DIR_OUT,
                              flags=gpiod.LINE_REQ_FLAG_ACTIVE_LOW)
        else:
            self.line.request(consumer="moonraker",
                              type=gpiod.LINE_REQ_DIR_OUT)
    except Exception:
        self.state = "error"
        logging.exception(
            f"Unable to init {pin}. Make sure the gpio is not in "
            "use by another program or exported by sysfs.")
        raise config.error("Power GPIO Config Error")
def __init__(self: Strip, name: str, cfg: ConfigHelper):
    """Base state for an addressable LED strip device."""
    self.server = cfg.get_server()
    self.request_mutex = asyncio.Lock()
    self.name = name
    self.initial_preset: int = cfg.getint("initial_preset", -1)
    # Initial color channels all default to half brightness
    for channel in ("red", "green", "blue", "white"):
        setattr(self, f"initial_{channel}",
                cfg.getfloat(f"initial_{channel}", 0.5))
    self.chain_count: int = cfg.getint("chain_count", 1)
    # Supports rgbw always: one _COLORSIZE slot per chained led
    self._chain_data = bytearray(self.chain_count * self._COLORSIZE)
    self.onoff = OnOff.off
    self.preset = self.initial_preset
def __init__(self, config: ConfigHelper) -> None:
    """System info collection plus reboot/shutdown/service endpoints."""
    self.server = config.get_server()
    dist_info: Dict[str, Any]
    dist_info = {'name': distro.name(pretty=True)}
    dist_info.update(distro.info())
    self.system_info: Dict[str, Any] = {
        'cpu_info': self._get_cpu_info(),
        'sd_info': self._get_sdcard_info(),
        'distribution': dist_info
    }
    # Add system info to log rollover
    sys_info_msg = "\nSystem Info:"
    for header, info in self.system_info.items():
        sys_info_msg += f"\n\n***{header}***"
        for key, val in info.items():
            sys_info_msg += f"\n {key}: {val}"
    self.server.add_log_rollover_item('system_info', sys_info_msg)
    self.available_services: List[str] = []
    self.server.register_endpoint("/machine/reboot", ['POST'],
                                  self._handle_machine_request)
    self.server.register_endpoint("/machine/shutdown", ['POST'],
                                  self._handle_machine_request)
    self.server.register_endpoint("/machine/services/restart", ['POST'],
                                  self._handle_service_request)
    self.server.register_endpoint("/machine/services/stop", ['POST'],
                                  self._handle_service_request)
    self.server.register_endpoint("/machine/services/start", ['POST'],
                                  self._handle_service_request)
    self.server.register_endpoint("/machine/system_info", ['GET'],
                                  self._handle_sysinfo_request)
    # Register remote methods (callable from Klipper)
    self.server.register_remote_method("shutdown_machine",
                                       self.shutdown_machine)
    self.server.register_remote_method("reboot_machine",
                                       self.reboot_machine)
    # Retrieve list of services asynchronously once the loop is running
    event_loop = self.server.get_event_loop()
    event_loop.register_callback(self._find_active_services)
def __init__(self, config: ConfigHelper) -> None:
    """Expose klippy print/restart actions as HTTP POST endpoints."""
    self.server = config.get_server()
    system_args = config['system_args']
    self.version = system_args.get('software_version')
    # Maintain a subscription for all moonraker requests, as
    # we do not want to overwrite them
    self.host_subscription: Subscription = {}
    # Register GCode Aliases
    for uri, handler in (
        ("/printer/print/pause", self._gcode_pause),
        ("/printer/print/resume", self._gcode_resume),
        ("/printer/print/cancel", self._gcode_cancel),
        ("/printer/print/start", self._gcode_start_print),
        ("/printer/restart", self._gcode_restart),
        ("/printer/firmware_restart", self._gcode_firmware_restart),
    ):
        self.server.register_endpoint(uri, ['POST'], handler)
def __init__(self, config: ConfigHelper) -> None:
    """Collect periodic process/system statistics.

    Registers a timer that samples cpu, memory, network, and (on
    Raspberry Pi) throttle state, keeps a rolling window of samples,
    and serves them via /machine/proc_stats and notifications.
    """
    self.server = config.get_server()
    self.event_loop = self.server.get_event_loop()
    self.watchdog = Watchdog(self)
    self.stat_update_timer = self.event_loop.register_timer(
        self._handle_stat_update)
    self.vcgencmd: Optional[shell_command.ShellCommand] = None
    if os.path.exists(VC_GEN_CMD_FILE):
        # Raspberry Pi: vcgencmd reports undervoltage/throttle flags
        logging.info("Detected 'vcgencmd', throttle checking enabled")
        shell_cmd: shell_command.ShellCommandFactory
        shell_cmd = self.server.load_component(config, "shell_command")
        self.vcgencmd = shell_cmd.build_shell_command(
            "vcgencmd get_throttled")
        self.server.register_notification("proc_stats:cpu_throttled")
    else:
        logging.info("Unable to find 'vcgencmd', throttle checking "
                     "disabled")
    self.temp_file = pathlib.Path(TEMPERATURE_PATH)
    # NOTE(review): attribute is named 'smaps' but the constant is
    # STATM_FILE_PATH — confirm which proc file is intended.
    self.smaps = pathlib.Path(STATM_FILE_PATH)
    self.netdev_file = pathlib.Path(NET_DEV_PATH)
    self.cpu_stats_file = pathlib.Path(CPU_STAT_PATH)
    self.meminfo_file = pathlib.Path(MEM_AVAIL_PATH)
    self.server.register_endpoint("/machine/proc_stats", ["GET"],
                                  self._handle_stat_request)
    self.server.register_event_handler("server:klippy_shutdown",
                                       self._handle_shutdown)
    self.server.register_notification("proc_stats:proc_stat_update")
    # Rolling window of the 30 most recent samples
    self.proc_stat_queue: Deque[Dict[str, Any]] = deque(maxlen=30)
    self.last_update_time = time.time()
    self.last_proc_time = time.process_time()
    self.throttle_check_lock = asyncio.Lock()
    self.total_throttled: int = 0
    self.last_throttled: int = 0
    self.update_sequence: int = 0
    self.last_net_stats: Dict[str, Dict[str, Any]] = {}
    self.last_cpu_stats: Dict[str, Tuple[int, int]] = {}
    self.cpu_usage: Dict[str, float] = {}
    self.memory_usage: Dict[str, int] = {}
    self.stat_callbacks: List[STAT_CALLBACK] = []
def __init__(self, config: ConfigHelper) -> None:
    """Machine integration: system info, virtualization detection, and
    reboot/shutdown/service-control endpoints.
    """
    self.server = config.get_server()
    dist_info: Dict[str, Any]
    dist_info = {'name': distro.name(pretty=True)}
    dist_info.update(distro.info())
    dist_info['release_info'] = distro.distro_release_info()
    self.inside_container = False
    self.virt_id = "none"
    self.system_info: Dict[str, Any] = {
        'cpu_info': self._get_cpu_info(),
        'sd_info': self._get_sdcard_info(),
        'distribution': dist_info,
        # presumably also updates self.inside_container / self.virt_id
        # as a side effect — confirm against its implementation
        'virtualization': self._check_inside_container()
    }
    self._update_log_rollover(log=True)
    self.available_services: Dict[str, Dict[str, str]] = {}
    self.server.register_endpoint("/machine/reboot", ['POST'],
                                  self._handle_machine_request)
    self.server.register_endpoint("/machine/shutdown", ['POST'],
                                  self._handle_machine_request)
    self.server.register_endpoint("/machine/services/restart", ['POST'],
                                  self._handle_service_request)
    self.server.register_endpoint("/machine/services/stop", ['POST'],
                                  self._handle_service_request)
    self.server.register_endpoint("/machine/services/start", ['POST'],
                                  self._handle_service_request)
    self.server.register_endpoint("/machine/system_info", ['GET'],
                                  self._handle_sysinfo_request)
    self.server.register_notification("machine:service_state_changed")
    # Register remote methods (callable from Klipper)
    self.server.register_remote_method("shutdown_machine",
                                       self.shutdown_machine)
    self.server.register_remote_method("reboot_machine",
                                       self.reboot_machine)
    self.init_evt = asyncio.Event()
def __init__(self, config: ConfigHelper) -> None:
    """Queue of pending print jobs with optional automatic transition."""
    self.server = config.get_server()
    self.queued_jobs: Dict[str, QueuedJob] = {}
    self.lock = asyncio.Lock()
    self.load_on_start = config.getboolean("load_on_startup", False)
    self.automatic = config.getboolean("automatic_transition", False)
    # An automatic queue starts ready; otherwise it waits to be started
    self.queue_state: str = "ready" if self.automatic else "paused"
    self.job_delay = config.getfloat("job_transition_delay", 0.01)
    if self.job_delay <= 0.:
        raise config.error(
            "Value for option 'job_transition_delay' in section [job_queue]"
            " must be above 0.0")
    self.job_transition_gcode = config.get(
        "job_transition_gcode", "").strip()
    # Handle for the delayed "pop next job" callback, if scheduled
    self.pop_queue_handle: Optional[asyncio.TimerHandle] = None
    self.server.register_event_handler(
        "server:klippy_ready", self._handle_ready)
    self.server.register_event_handler(
        "server:klippy_shutdown", self._handle_shutdown)
    self.server.register_event_handler(
        "job_state:complete", self._on_job_complete)
    self.server.register_event_handler(
        "job_state:error", self._on_job_abort)
    self.server.register_event_handler(
        "job_state:cancelled", self._on_job_abort)
    self.server.register_notification("job_queue:job_queue_changed")
    self.server.register_remote_method("pause_job_queue",
                                       self.pause_queue)
    self.server.register_remote_method("start_job_queue",
                                       self.start_queue)
    self.server.register_endpoint(
        "/server/job_queue/job", ['POST', 'DELETE'],
        self._handle_job_request)
    self.server.register_endpoint(
        "/server/job_queue/pause", ['POST'], self._handle_pause_queue)
    self.server.register_endpoint(
        "/server/job_queue/start", ['POST'], self._handle_start_queue)
    self.server.register_endpoint(
        "/server/job_queue/status", ['GET'], self._handle_queue_status)
def __init__(self, config: ConfigHelper) -> None:
    """A gpio button that renders templates on press/release events."""
    self.server = config.get_server()
    self.eventloop = self.server.get_event_loop()
    self.name = config.get_name().split()[-1]
    self.itransport: ITransport = self.server.lookup_component(
        'internal_transport')
    self.mutex = asyncio.Lock()
    gpio: GpioFactory = self.server.load_component(config, 'gpio')
    self.gpio_event = gpio.register_gpio_event(config.get('pin'),
                                               self._on_gpio_event)
    # Debounce window (seconds); transitions faster than this are ignored
    min_event_time = config.getfloat('minimum_event_time', .05,
                                     minval=.010)
    self.gpio_event.setup_debounce(min_event_time, self._on_gpio_error)
    self.press_template = config.gettemplate("on_press", None,
                                             is_async=True)
    self.release_template = config.gettemplate("on_release", None,
                                               is_async=True)
    # At least one action template must be configured
    if (self.press_template is None and
            self.release_template is None):
        raise config.error(
            f"[{config.get_name()}]: No template option configured")
    self.notification_sent: bool = False
    self.user_data: Dict[str, Any] = {}
    # Context exposed to the rendered templates
    self.context: Dict[str, Any] = {
        'call_method': self.itransport.call_method,
        'send_notification': self._send_notification,
        'event': {
            'elapsed_time': 0.,
            'received_time': 0.,
            'render_time': 0.,
            'pressed': False,
        },
        'user_data': self.user_data
    }
def __init__(self, config: ConfigHelper) -> None:
    """Power device implemented by a Klipper output_pin or gcode_macro.

    Several base options are rejected because the device is only
    controllable while Klippy is connected and ready.
    """
    super().__init__(config)
    if self.off_when_shutdown:
        raise config.error(
            "Option 'off_when_shutdown' in section "
            f"[{config.get_name()}] is unsupported for 'klipper_device'")
    if self.klipper_restart:
        raise config.error(
            "Option 'restart_klipper_when_powered' in section "
            f"[{config.get_name()}] is unsupported for 'klipper_device'")
    if (self.bound_service is not None and
            self.bound_service.startswith("klipper")):
        # Klipper devices cannot be bound to an instance of klipper or
        # klipper_mcu
        raise config.error(
            f"Option 'bound_service' cannot be set to {self.bound_service}"
            f" for 'klipper_device' [{config.get_name()}]")
    self.is_shutdown: bool = False
    self.update_fut: Optional[asyncio.Future] = None
    self.request_mutex = asyncio.Lock()
    # Optional auto-off timer in seconds
    self.timer: Optional[float] = config.getfloat('timer', None,
                                                  above=0.000001)
    self.timer_handle: Optional[asyncio.TimerHandle] = None
    self.object_name = config.get('object_name')
    obj_parts = self.object_name.split()
    self.gc_cmd = f"SET_PIN PIN={obj_parts[-1]} "
    if obj_parts[0] == "gcode_macro":
        # For a macro, the command is just the macro name itself
        self.gc_cmd = obj_parts[-1]
    elif obj_parts[0] != "output_pin":
        raise config.error(
            "Klipper object must be either 'output_pin' or 'gcode_macro' "
            f"for option 'object_name' in section [{config.get_name()}]")
    self.server.register_event_handler("server:status_update",
                                       self._status_update)
    self.server.register_event_handler("server:klippy_ready",
                                       self._handle_ready)
    self.server.register_event_handler("server:klippy_disconnect",
                                       self._handle_disconnect)
def __init__(self, config: ConfigHelper) -> None:
    """Update manager: builds the set of updaters (system packages,
    klipper, moonraker, configured clients) and registers the
    /machine/update endpoints.
    """
    self.server = config.get_server()
    self.event_loop = self.server.get_event_loop()
    self.app_config = config.read_supplemental_config(
        SUPPLEMENTAL_CFG_PATH)
    auto_refresh_enabled = config.getboolean('enable_auto_refresh',
                                             False)
    self.channel = config.get('channel', "dev")
    if self.channel not in ["dev", "beta"]:
        raise config.error(
            f"Unsupported channel '{self.channel}' in section"
            " [update_manager]")
    self.cmd_helper = CommandHelper(config)
    self.updaters: Dict[str, BaseDeploy] = {}
    if config.getboolean('enable_system_updates', True):
        self.updaters['system'] = PackageDeploy(config, self.cmd_helper)
    # Use a real deploy class only when a klipper install is present;
    # otherwise register a placeholder BaseDeploy
    if (
        os.path.exists(KLIPPER_DEFAULT_PATH)
        and os.path.exists(KLIPPER_DEFAULT_EXEC)
    ):
        self.updaters['klipper'] = get_deploy_class(KLIPPER_DEFAULT_PATH)(
            self.app_config[f"update_manager klipper"], self.cmd_helper,
            {
                'channel': self.channel,
                'path': KLIPPER_DEFAULT_PATH,
                'executable': KLIPPER_DEFAULT_EXEC
            })
    else:
        self.updaters['klipper'] = BaseDeploy(
            self.app_config[f"update_manager klipper"], self.cmd_helper)
    self.updaters['moonraker'] = get_deploy_class(MOONRAKER_PATH)(
        self.app_config[f"update_manager moonraker"], self.cmd_helper,
        {
            'channel': self.channel,
            'path': MOONRAKER_PATH,
            'executable': sys.executable
        })
    # TODO: The below check may be removed when invalid config options
    # raise a config error.
    if (
        config.get("client_repo", None) is not None
        or config.get('client_path', None) is not None
    ):
        raise config.error(
            "The deprecated 'client_repo' and 'client_path' options\n"
            "have been removed. See Moonraker's configuration docs\n"
            "for details on client configuration.")
    client_sections = config.get_prefix_sections("update_manager ")
    for section in client_sections:
        cfg = config[section]
        name = section.split()[-1]
        if name in self.updaters:
            raise config.error(f"Client repo {name} already added")
        client_type = cfg.get("type")
        if client_type in ["web", "web_beta"]:
            self.updaters[name] = WebClientDeploy(cfg, self.cmd_helper)
        elif client_type in ["git_repo", "zip", "zip_beta"]:
            path = os.path.expanduser(cfg.get('path'))
            self.updaters[name] = get_deploy_class(path)(
                cfg, self.cmd_helper)
        else:
            raise config.error(
                f"Invalid type '{client_type}' for section [{section}]")
    self.cmd_request_lock = asyncio.Lock()
    self.initialized_lock = asyncio.Event()
    self.klippy_identified_evt: Optional[asyncio.Event] = None
    # Auto Status Refresh
    self.last_refresh_time: float = 0
    self.refresh_cb: Optional[PeriodicCallback] = None
    if auto_refresh_enabled:
        self.refresh_cb = PeriodicCallback(
            self._handle_auto_refresh,  # type: ignore
            UPDATE_REFRESH_INTERVAL_MS)
    self.server.register_endpoint(
        "/machine/update/moonraker", ["POST"],
        self._handle_update_request)
    self.server.register_endpoint(
        "/machine/update/klipper", ["POST"],
        self._handle_update_request)
    self.server.register_endpoint(
        "/machine/update/system", ["POST"],
        self._handle_update_request)
    self.server.register_endpoint(
        "/machine/update/client", ["POST"],
        self._handle_update_request)
    self.server.register_endpoint(
        "/machine/update/full", ["POST"],
        self._handle_full_update_request)
    self.server.register_endpoint(
        "/machine/update/status", ["GET"],
        self._handle_status_request)
    self.server.register_endpoint(
        "/machine/update/recover", ["POST"],
        self._handle_repo_recovery)
    self.server.register_notification("update_manager:update_response")
    self.server.register_notification("update_manager:update_refreshed")
    # Register Ready Event
    self.server.register_event_handler(
        "server:klippy_identified", self._set_klipper_repo)
    # Initialize GitHub API Rate Limits and configured updaters.
    # NOTE(review): '_initalize_updaters' is misspelled but matches the
    # method name defined elsewhere in this module.
    self.event_loop.register_callback(
        self._initalize_updaters, list(self.updaters.values()))
def _verify_path(self, config: ConfigHelper, option: str, file_path: pathlib.Path) -> None: if not file_path.exists(): raise config.error( f"Invalid path for option `{option}` in section " f"[{config.get_name()}]: Path `{file_path}` does not exist")
def __init__(self,
             config: ConfigHelper,
             cmd_helper: CommandHelper,
             app_params: Optional[Dict[str, Any]]) -> None:
    """Base deploy handler for an installed application.

    State may come either from explicit app_params (for the built-in
    klipper/moonraker updaters) or from the config section itself.
    """
    super().__init__(config, cmd_helper)
    self.config = config
    self.app_params = app_params
    self.debug = self.cmd_helper.is_debug_enabled()
    if app_params is not None:
        # Parameters supplied programmatically rather than via config
        self.channel: str = app_params['channel']
        self.path: pathlib.Path = pathlib.Path(
            app_params['path']).expanduser().resolve()
        executable: Optional[str] = app_params['executable']
        self.type = CHANNEL_TO_TYPE[self.channel]
    else:
        self.type = config.get('type')
        self.channel = TYPE_TO_CHANNEL[self.type]
        self.path = pathlib.Path(
            config.get('path')).expanduser().resolve()
        executable = config.get('env', None)
    if self.channel not in CHANNEL_TO_TYPE.keys():
        raise config.error(f"Invalid Channel '{self.channel}' for config "
                           f"section [{config.get_name()}]")
    self._verify_path(config, 'path', self.path)
    self.executable: Optional[pathlib.Path] = None
    self.venv_args: Optional[str] = None
    if executable is not None:
        # A python app: 'env' points at the virtualenv interpreter
        self.executable = pathlib.Path(executable).expanduser().resolve()
        self._verify_path(config, 'env', self.executable)
        self.venv_args = config.get('venv_args', None)
    self.is_service = config.getboolean("is_system_service", True)
    self.need_channel_update = False
    self._is_valid = False
    # We need to fetch all potential options for an Application.  Not
    # all options apply to each subtype, however we can't limit the
    # options in children if we want to switch between channels and
    # satisfy the confighelper's requirements.
    self.origin: str = config.get('origin')
    self.primary_branch = config.get("primary_branch", "master")
    self.npm_pkg_json: Optional[pathlib.Path] = None
    # NOTE(review): uses get() rather than getboolean(); any non-empty
    # string value is truthy here — confirm intended.
    if config.get("enable_node_updates", False):
        self.npm_pkg_json = self.path.joinpath("package-lock.json")
        self._verify_path(config, 'enable_node_updates',
                          self.npm_pkg_json)
    self.python_reqs: Optional[pathlib.Path] = None
    if self.executable is not None:
        self.python_reqs = self.path.joinpath(config.get("requirements"))
        self._verify_path(config, 'requirements', self.python_reqs)
    self.install_script: Optional[pathlib.Path] = None
    install_script = config.get('install_script', None)
    if install_script is not None:
        self.install_script = self.path.joinpath(
            install_script).resolve()
        self._verify_path(config, 'install_script', self.install_script)