def process_ha_config_upgrade(hass: HomeAssistant) -> None:
    """Upgrade the configuration directory if the stored version is outdated.

    Reads the version file from the config dir, runs any one-time migrations
    needed between that version and the running version, then rewrites the
    version file. This method needs to run in an executor (blocking file I/O).
    """
    version_path = hass.config.path(VERSION_FILE)

    try:
        # The version file records which HA version last wrote this config dir.
        with open(version_path, "rt") as inp:
            conf_version = inp.readline().strip()
    except FileNotFoundError:
        # Last version to not have this file
        conf_version = "0.7.7"

    if conf_version == __version__:
        return

    _LOGGER.info(
        "Upgrading configuration directory from %s to %s", conf_version, __version__
    )

    version_obj = LooseVersion(conf_version)

    if version_obj < LooseVersion("0.50"):
        # 0.50 introduced persistent deps dir.
        lib_path = hass.config.path("deps")
        if os.path.isdir(lib_path):
            shutil.rmtree(lib_path)

    if version_obj < LooseVersion("0.92"):
        # 0.92 moved google/tts.py to google_translate/tts.py
        config_path = find_config_file(hass.config.config_dir)
        assert config_path is not None

        with open(config_path, "rt", encoding="utf-8") as config_file:
            config_raw = config_file.read()

        if TTS_PRE_92 in config_raw:
            _LOGGER.info("Migrating google tts to google_translate tts")
            config_raw = config_raw.replace(TTS_PRE_92, TTS_92)
            try:
                with open(config_path, "wt", encoding="utf-8") as config_file:
                    config_file.write(config_raw)
            except OSError:
                # Best effort: log and leave the old config untouched.
                # (Fixed: removed a dead `pass` that followed this call.)
                _LOGGER.exception("Migrating to google_translate tts failed")

    if version_obj < LooseVersion("0.94") and is_docker_env():
        # In 0.94 we no longer install packages inside the deps folder when
        # running inside a Docker container.
        lib_path = hass.config.path("deps")
        if os.path.isdir(lib_path):
            shutil.rmtree(lib_path)

    with open(version_path, "wt") as outp:
        outp.write(__version__)
def pip_kwargs(config_dir: Optional[str]) -> Dict[str, Any]:
    """Return keyword arguments for PIP install."""
    is_docker = pkg_util.is_docker_env()
    constraints_path = os.path.join(os.path.dirname(__file__), CONSTRAINT_FILE)
    kwargs: Dict[str, Any] = {
        "constraints": constraints_path,
        "no_cache_dir": is_docker,
    }
    wheels_links = os.environ.get("WHEELS_LINKS")
    if wheels_links is not None:
        kwargs["find_links"] = wheels_links
    # Install into <config>/deps only when a config dir is known and we are
    # neither inside a virtualenv nor inside a Docker container.
    if config_dir is not None and not pkg_util.is_virtual_env() and not is_docker:
        kwargs["target"] = os.path.join(config_dir, "deps")
    return kwargs
def pip_kwargs(config_dir: Optional[str]) -> Dict[str, Any]:
    """Return keyword arguments for PIP install."""
    is_docker = pkg_util.is_docker_env()
    kwargs: Dict[str, Any] = {}
    kwargs['constraints'] = os.path.join(
        os.path.dirname(__file__), CONSTRAINT_FILE)
    kwargs['no_cache_dir'] = is_docker
    if 'WHEELS_LINKS' in os.environ:
        kwargs['find_links'] = os.environ['WHEELS_LINKS']
    # A deps target dir is only used outside of virtualenv/Docker setups.
    use_target = (config_dir is not None
                  and not pkg_util.is_virtual_env()
                  and not is_docker)
    if use_target:
        kwargs['target'] = os.path.join(config_dir, 'deps')
    return kwargs
(CONF_NAME, "location_name"), (CONF_ELEVATION, "elevation"), (CONF_INTERNAL_URL, "internal_url"), (CONF_EXTERNAL_URL, "external_url"), (CONF_MEDIA_DIRS, "media_dirs"), (CONF_LEGACY_TEMPLATES, "legacy_templates"), (CONF_CURRENCY, "currency"), ): if key in config: setattr(hac, attr, config[key]) if CONF_TIME_ZONE in config: hac.set_time_zone(config[CONF_TIME_ZONE]) if CONF_MEDIA_DIRS not in config: if is_docker_env(): hac.media_dirs = {"local": "/media"} else: hac.media_dirs = {"local": hass.config.path("media")} # Init whitelist external dir hac.allowlist_external_dirs = { hass.config.path("www"), *hac.media_dirs.values() } if CONF_ALLOWLIST_EXTERNAL_DIRS in config: hac.allowlist_external_dirs.update( set(config[CONF_ALLOWLIST_EXTERNAL_DIRS])) elif LEGACY_CONF_WHITELIST_EXTERNAL_DIRS in config: _LOGGER.warning( "Key %s has been replaced with %s. Please update your config",
async def async_process_ha_core_config(hass: HomeAssistant, config: Dict) -> None:
    """Process the [homeassistant] section from the configuration.

    Validates the section, sets up auth (first call only), loads the stored
    core config, then applies YAML overrides: location, URLs, media dirs,
    allowlists, customize data, and the unit system.

    This method is a coroutine.
    """
    config = CORE_CONFIG_SCHEMA(config)

    # Only load auth during startup.
    if not hasattr(hass, "auth"):
        auth_conf = config.get(CONF_AUTH_PROVIDERS)

        if auth_conf is None:
            auth_conf = [{"type": "homeassistant"}]

        mfa_conf = config.get(
            CONF_AUTH_MFA_MODULES,
            [{"type": "totp", "id": "totp", "name": "Authenticator app"}],
        )

        setattr(
            hass,
            "auth",
            await auth.auth_manager_from_config(hass, auth_conf, mfa_conf),
        )

    await hass.config.async_load()

    hac = hass.config

    # Any of these keys present in YAML means YAML is the config source.
    if any(
        k in config
        for k in [
            CONF_LATITUDE,
            CONF_LONGITUDE,
            CONF_NAME,
            CONF_ELEVATION,
            CONF_TIME_ZONE,
            CONF_UNIT_SYSTEM,
            CONF_EXTERNAL_URL,
            CONF_INTERNAL_URL,
            CONF_MEDIA_DIRS,
        ]
    ):
        hac.config_source = SOURCE_YAML

    for key, attr in (
        (CONF_LATITUDE, "latitude"),
        (CONF_LONGITUDE, "longitude"),
        (CONF_NAME, "location_name"),
        (CONF_ELEVATION, "elevation"),
        (CONF_INTERNAL_URL, "internal_url"),
        (CONF_EXTERNAL_URL, "external_url"),
        (CONF_MEDIA_DIRS, "media_dirs"),
    ):
        if key in config:
            setattr(hac, attr, config[key])

    if CONF_TIME_ZONE in config:
        hac.set_time_zone(config[CONF_TIME_ZONE])

    if CONF_MEDIA_DIRS not in config:
        if is_docker_env():
            hac.media_dirs = {"local": "/media"}
        else:
            hac.media_dirs = {"local": hass.config.path("media")}

    # Init whitelist external dir
    hac.allowlist_external_dirs = {
        hass.config.path("www"),
        *hac.media_dirs.values(),
    }
    if CONF_ALLOWLIST_EXTERNAL_DIRS in config:
        hac.allowlist_external_dirs.update(
            set(config[CONF_ALLOWLIST_EXTERNAL_DIRS])
        )
    elif LEGACY_CONF_WHITELIST_EXTERNAL_DIRS in config:
        # Fixed: this log message was broken in two by a stray line break
        # inside the string literal; it is now a single coherent message.
        _LOGGER.warning(
            "Key %s has been replaced with %s. Please update your config",
            LEGACY_CONF_WHITELIST_EXTERNAL_DIRS,
            CONF_ALLOWLIST_EXTERNAL_DIRS,
        )
        hac.allowlist_external_dirs.update(
            set(config[LEGACY_CONF_WHITELIST_EXTERNAL_DIRS])
        )

    # Init whitelist external URL list – make sure to add / to every URL that doesn't
    # already have it so that we can properly test "path ownership"
    if CONF_ALLOWLIST_EXTERNAL_URLS in config:
        hac.allowlist_external_urls.update(
            url if url.endswith("/") else f"{url}/"
            for url in config[CONF_ALLOWLIST_EXTERNAL_URLS]
        )

    # Customize
    cust_exact = dict(config[CONF_CUSTOMIZE])
    cust_domain = dict(config[CONF_CUSTOMIZE_DOMAIN])
    cust_glob = OrderedDict(config[CONF_CUSTOMIZE_GLOB])

    for name, pkg in config[CONF_PACKAGES].items():
        pkg_cust = pkg.get(CONF_CORE)

        if pkg_cust is None:
            continue

        try:
            pkg_cust = CUSTOMIZE_CONFIG_SCHEMA(pkg_cust)
        except vol.Invalid:
            _LOGGER.warning("Package %s contains invalid customize", name)
            continue

        cust_exact.update(pkg_cust[CONF_CUSTOMIZE])
        cust_domain.update(pkg_cust[CONF_CUSTOMIZE_DOMAIN])
        cust_glob.update(pkg_cust[CONF_CUSTOMIZE_GLOB])

    hass.data[DATA_CUSTOMIZE] = EntityValues(cust_exact, cust_domain, cust_glob)

    if CONF_UNIT_SYSTEM in config:
        if config[CONF_UNIT_SYSTEM] == CONF_UNIT_SYSTEM_IMPERIAL:
            hac.units = IMPERIAL_SYSTEM
        else:
            hac.units = METRIC_SYSTEM
    elif CONF_TEMPERATURE_UNIT in config:
        # Deprecated path: derive the unit system from the temperature unit.
        unit = config[CONF_TEMPERATURE_UNIT]
        hac.units = METRIC_SYSTEM if unit == TEMP_CELSIUS else IMPERIAL_SYSTEM
        _LOGGER.warning(
            "Found deprecated temperature unit in core "
            "configuration expected unit system. Replace '%s: %s' "
            "with '%s: %s'",
            CONF_TEMPERATURE_UNIT,
            unit,
            CONF_UNIT_SYSTEM,
            hac.units.name,
        )
async def _async_start(self) -> None:
    """Start bluetooth scanner under the lock.

    Retries up to START_ATTEMPTS times, resetting the adapter once on a
    timeout or a known needs-reset BleakError, and raising ScannerStartError
    with actionable advice for known DBus failure modes.
    """
    for attempt in range(START_ATTEMPTS):
        _LOGGER.debug(
            "%s: Starting bluetooth discovery attempt: (%s/%s)",
            self.name,
            attempt + 1,
            START_ATTEMPTS,
        )
        try:
            async with async_timeout.timeout(START_TIMEOUT):
                await self.scanner.start()  # type: ignore[no-untyped-call]
        except InvalidMessageError as ex:
            _LOGGER.debug(
                "%s: Invalid DBus message received: %s",
                self.name,
                ex,
                exc_info=True,
            )
            raise ScannerStartError(
                f"{self.name}: Invalid DBus message received: {ex}; "
                "try restarting `dbus`"
            ) from ex
        except BrokenPipeError as ex:
            _LOGGER.debug(
                "%s: DBus connection broken: %s", self.name, ex, exc_info=True
            )
            if is_docker_env():
                raise ScannerStartError(
                    f"{self.name}: DBus connection broken: {ex}; try restarting "
                    "`bluetooth`, `dbus`, and finally the docker container"
                ) from ex
            raise ScannerStartError(
                f"{self.name}: DBus connection broken: {ex}; try restarting "
                "`bluetooth` and `dbus`"
            ) from ex
        except FileNotFoundError as ex:
            _LOGGER.debug(
                "%s: FileNotFoundError while starting bluetooth: %s",
                self.name,
                ex,
                exc_info=True,
            )
            if is_docker_env():
                # Fixed: the second fragment was missing its `f` prefix, so
                # the literal text "{ex}" was raised instead of the error.
                raise ScannerStartError(
                    f"{self.name}: DBus service not found; docker config may "
                    f"be missing `-v /run/dbus:/run/dbus:ro`: {ex}"
                ) from ex
            raise ScannerStartError(
                f"{self.name}: DBus service not found; make sure the DBus socket "
                f"is available to Home Assistant: {ex}"
            ) from ex
        except asyncio.TimeoutError as ex:
            if attempt == 0:
                # First timeout: reset the adapter and retry once.
                await self._async_reset_adapter()
                continue
            raise ScannerStartError(
                f"{self.name}: Timed out starting Bluetooth after {START_TIMEOUT} seconds"
            ) from ex
        except BleakError as ex:
            error_str = str(ex)
            if attempt == 0:
                if any(
                    needs_reset_error in error_str
                    for needs_reset_error in NEED_RESET_ERRORS
                ):
                    await self._async_reset_adapter()
                    continue
            if attempt != START_ATTEMPTS - 1:
                # If we are not out of retry attempts, and the
                # adapter is still initializing, wait a bit and try again.
                if any(
                    wait_error in error_str
                    for wait_error in WAIT_FOR_ADAPTER_TO_INIT_ERRORS
                ):
                    _LOGGER.debug(
                        "%s: Waiting for adapter to initialize; attempt (%s/%s)",
                        self.name,
                        attempt + 1,
                        START_ATTEMPTS,
                    )
                    await asyncio.sleep(ADAPTER_INIT_TIME)
                    continue

            _LOGGER.debug(
                "%s: BleakError while starting bluetooth; attempt: (%s/%s): %s",
                self.name,
                attempt + 1,
                START_ATTEMPTS,
                ex,
                exc_info=True,
            )
            raise ScannerStartError(
                f"{self.name}: Failed to start Bluetooth: {ex}"
            ) from ex

        # Everything is fine, break out of the loop
        break

    self._async_setup_scanner_watchdog()
    self._cancel_stop = self.hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_STOP, self._async_hass_stopping
    )
def process_ha_config_upgrade(hass: HomeAssistant) -> None:
    """Upgrade the configuration directory if the stored version is outdated.

    Reads the version file, runs one-time migrations between the recorded
    version and the running version, rewrites the version file, and finally
    renames legacy files per FILE_MIGRATION. Needs to run in an executor.
    """
    version_path = hass.config.path(VERSION_FILE)

    try:
        # The version file records which HA version last wrote this config dir.
        with open(version_path, 'rt') as inp:
            conf_version = inp.readline().strip()
    except FileNotFoundError:
        # Last version to not have this file
        conf_version = '0.7.7'

    if conf_version == __version__:
        return

    _LOGGER.info("Upgrading configuration directory from %s to %s",
                 conf_version, __version__)

    version_obj = StrictVersion(conf_version)

    if version_obj < StrictVersion('0.50'):
        # 0.50 introduced persistent deps dir.
        lib_path = hass.config.path('deps')
        if os.path.isdir(lib_path):
            shutil.rmtree(lib_path)

    if version_obj < StrictVersion('0.92'):
        # 0.92 moved google/tts.py to google_translate/tts.py
        config_path = find_config_file(hass.config.config_dir)
        assert config_path is not None

        with open(config_path, 'rt', encoding='utf-8') as config_file:
            config_raw = config_file.read()

        if TTS_PRE_92 in config_raw:
            _LOGGER.info("Migrating google tts to google_translate tts")
            config_raw = config_raw.replace(TTS_PRE_92, TTS_92)
            try:
                with open(config_path, 'wt', encoding='utf-8') as config_file:
                    config_file.write(config_raw)
            except OSError:
                # Best effort: log and leave the old config untouched.
                # (Fixed: removed a dead `pass`; IOError -> its alias OSError.)
                _LOGGER.exception("Migrating to google_translate tts failed")

    if version_obj < StrictVersion('0.94.0b6') and is_docker_env():
        # In 0.94 we no longer install packages inside the deps folder when
        # running inside a Docker container.
        lib_path = hass.config.path('deps')
        if os.path.isdir(lib_path):
            shutil.rmtree(lib_path)

    with open(version_path, 'wt') as outp:
        outp.write(__version__)

    _LOGGER.debug("Migrating old system configuration files to new locations")
    for oldf, newf in FILE_MIGRATION:
        if os.path.isfile(hass.config.path(oldf)):
            _LOGGER.info("Migrating %s to %s", oldf, newf)
            os.rename(hass.config.path(oldf), hass.config.path(newf))