async def handle_manifest_list(
    opp: OpenPeerPower, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
    """Handle integrations command."""
    loaded_integrations = async_get_loaded_integrations(opp)
    integrations = await asyncio.gather(
        *[async_get_integration(opp, domain) for domain in loaded_integrations]
    )
    connection.send_result(
        msg["id"], [integration.manifest for integration in integrations]
    )
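
# Illustrative sketch, not taken from the module above: how a handler such as
# handle_manifest_list is typically decorated and registered with the
# websocket_api component. The "integration/loaded" command type and the
# function names below are hypothetical examples of that wiring.
import voluptuous as vol

from openpeerpower.components import websocket_api
from openpeerpower.loader import async_get_loaded_integrations


@websocket_api.websocket_command({vol.Required("type"): "integration/loaded"})
@websocket_api.async_response
async def handle_loaded_list(opp, connection, msg):
    """Return the currently loaded integration domains."""
    connection.send_result(msg["id"], sorted(async_get_loaded_integrations(opp)))


def register_example_commands(opp) -> None:
    """Register the example command (normally done during websocket_api setup)."""
    websocket_api.async_register_command(opp, handle_loaded_list)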
async def test_component_translation_path(opp, enable_custom_integrations):
    """Test the component translation file function."""
    assert await async_setup_component(
        opp,
        "switch",
        {"switch": [{"platform": "test"}, {"platform": "test_embedded"}]},
    )
    assert await async_setup_component(opp, "test_package", {"test_package": None})

    (
        int_test,
        int_test_embedded,
        int_test_package,
    ) = await asyncio.gather(
        async_get_integration(opp, "test"),
        async_get_integration(opp, "test_embedded"),
        async_get_integration(opp, "test_package"),
    )

    assert path.normpath(
        translation.component_translation_path("switch.test", "en", int_test)
    ) == path.normpath(
        opp.config.path("custom_components", "test", "translations", "switch.en.json")
    )

    assert path.normpath(
        translation.component_translation_path(
            "switch.test_embedded", "en", int_test_embedded
        )
    ) == path.normpath(
        opp.config.path(
            "custom_components", "test_embedded", "translations", "switch.en.json"
        )
    )

    assert path.normpath(
        translation.component_translation_path("test_package", "en", int_test_package)
    ) == path.normpath(
        opp.config.path("custom_components", "test_package", "translations", "en.json")
    )
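
# Minimal sketch (assumption, simplified from the real helper) of the path
# logic the assertions above exercise: a platform reference ("switch.test")
# maps to "<integration dir>/translations/<platform>.<language>.json", while a
# plain component ("test_package") maps to "<integration dir>/translations/
# <language>.json". The real helper can also return None when an integration
# ships no translations, which async_get_component_strings below relies on.
def component_translation_path_sketch(component: str, language: str, integration) -> str:
    """Return the translation file path for a component or platform."""
    parts = component.split(".")
    is_platform = len(parts) == 2

    if is_platform:
        # e.g. "switch.test" -> .../test/translations/switch.en.json
        filename = f"{parts[0]}.{language}.json"
    else:
        # e.g. "test_package" -> .../test_package/translations/en.json
        filename = f"{language}.json"

    return str(integration.file_path / "translations" / filename)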
async def async_get_component_strings(
    opp: OpenPeerPower, language: str, components: set[str]
) -> dict[str, Any]:
    """Load translations."""
    domains = list({loaded.split(".")[-1] for loaded in components})
    integrations = dict(
        zip(
            domains,
            await gather_with_concurrency(
                MAX_LOAD_CONCURRENTLY,
                *[async_get_integration(opp, domain) for domain in domains],
            ),
        )
    )

    translations: dict[str, Any] = {}

    # Determine paths of missing components/platforms
    files_to_load = {}
    for loaded in components:
        parts = loaded.split(".")
        domain = parts[-1]
        integration = integrations[domain]

        path = component_translation_path(loaded, language, integration)
        # No translation available
        if path is None:
            translations[loaded] = {}
        else:
            files_to_load[loaded] = path

    if not files_to_load:
        return translations

    # Load files
    load_translations_job = opp.async_add_executor_job(
        load_translations_files, files_to_load
    )
    assert load_translations_job is not None
    loaded_translations = await load_translations_job

    # Translations that miss "title" will get the integration name put in.
    for loaded, loaded_translation in loaded_translations.items():
        if "." in loaded:
            continue

        if "title" not in loaded_translation:
            loaded_translation["title"] = integrations[loaded].name

    translations.update(loaded_translations)

    return translations
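
# Minimal sketch, assuming the behavior of the gather_with_concurrency helper
# used above (and again in the bootstrap and service helpers below): an
# asyncio.gather whose concurrency is bounded by a semaphore so that at most
# `limit` coroutines run at the same time.
import asyncio


async def gather_with_concurrency_sketch(limit, *tasks, return_exceptions=False):
    """Gather coroutines while letting at most `limit` run concurrently."""
    semaphore = asyncio.Semaphore(limit)

    async def bounded(task):
        async with semaphore:
            return await task

    return await asyncio.gather(
        *(bounded(task) for task in tasks), return_exceptions=return_exceptions
    )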
async def _async_set_up_integrations(
    opp: core.OpenPeerPower, config: Dict[str, Any]
) -> None:
    """Set up all the integrations."""
    domains = _get_domains(opp, config)

    # Start up debuggers. Start these first in case they want to wait.
    debuggers = domains & DEBUGGER_INTEGRATIONS
    if debuggers:
        _LOGGER.debug("Starting up debuggers %s", debuggers)
        await asyncio.gather(
            *(async_setup_component(opp, domain, config) for domain in debuggers)
        )
        domains -= DEBUGGER_INTEGRATIONS

    # Resolve all dependencies of all components so we can find the logging
    # and integrations that need faster initialization.
    resolved_domains_task = asyncio.gather(
        *(loader.async_component_dependencies(opp, domain) for domain in domains),
        return_exceptions=True,
    )

    # Finish resolving domains
    for dep_domains in await resolved_domains_task:
        # Result is either a set or an exception. We ignore exceptions
        # It will be properly handled during setup of the domain.
        if isinstance(dep_domains, set):
            domains.update(dep_domains)

    # setup components
    logging_domains = domains & LOGGING_INTEGRATIONS
    stage_1_domains = domains & STAGE_1_INTEGRATIONS
    stage_2_domains = domains - logging_domains - stage_1_domains

    if logging_domains:
        _LOGGER.info("Setting up %s", logging_domains)
        await asyncio.gather(
            *(async_setup_component(opp, domain, config) for domain in logging_domains)
        )

    # Kick off loading the registries. They don't need to be awaited.
    asyncio.gather(
        opp.helpers.device_registry.async_get_registry(),
        opp.helpers.entity_registry.async_get_registry(),
        opp.helpers.area_registry.async_get_registry(),
    )

    if stage_1_domains:
        await asyncio.gather(
            *(async_setup_component(opp, domain, config) for domain in stage_1_domains)
        )

    # Load all integrations
    after_dependencies: Dict[str, Set[str]] = {}

    for int_or_exc in await asyncio.gather(
        *(loader.async_get_integration(opp, domain) for domain in stage_2_domains),
        return_exceptions=True,
    ):
        # Exceptions are handled in async_setup_component.
        if isinstance(int_or_exc, loader.Integration) and int_or_exc.after_dependencies:
            after_dependencies[int_or_exc.domain] = set(int_or_exc.after_dependencies)

    last_load = None
    while stage_2_domains:
        domains_to_load = set()

        for domain in stage_2_domains:
            after_deps = after_dependencies.get(domain)
            # Load if integration has no after_dependencies or they are
            # all loaded
            if not after_deps or not after_deps - opp.config.components:
                domains_to_load.add(domain)

        if not domains_to_load or domains_to_load == last_load:
            break

        _LOGGER.debug("Setting up %s", domains_to_load)

        await asyncio.gather(
            *(async_setup_component(opp, domain, config) for domain in domains_to_load)
        )

        last_load = domains_to_load
        stage_2_domains -= domains_to_load

    # These are stage 2 domains that never have their after_dependencies
    # satisfied.
    if stage_2_domains:
        _LOGGER.debug("Final set up: %s", stage_2_domains)

        await asyncio.gather(
            *(async_setup_component(opp, domain, config) for domain in stage_2_domains)
        )

    # Wrap up startup
    await opp.async_block_till_done()
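
# Standalone illustration (hypothetical domain names, no OPP objects) of the
# after_dependencies deferral loop above: each round sets up the domains whose
# after_dependencies are already loaded, and stops once nothing new can load.
def order_stage_2(stage_2: set, after_deps: dict, already_loaded: set) -> list:
    """Return the batches the loop above would set up, in order."""
    stage_2 = set(stage_2)
    loaded = set(already_loaded)
    batches = []
    last_load = None
    while stage_2:
        batch = {
            domain
            for domain in stage_2
            if not after_deps.get(domain) or not after_deps[domain] - loaded
        }
        if not batch or batch == last_load:
            break
        batches.append(batch)
        loaded |= batch
        last_load = batch
        stage_2 -= batch
    if stage_2:
        # after_dependencies never satisfied; set up in a final batch anyway
        batches.append(stage_2)
    return batches


# Example: "cloud" waits for "http", and "alexa" waits for "cloud":
# order_stage_2({"http", "cloud", "alexa"}, {"cloud": {"http"}, "alexa": {"cloud"}}, set())
# -> [{"http"}, {"cloud"}, {"alexa"}]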
async def send_analytics(self, _=None) -> None:
    """Send analytics."""
    supervisor_info = None
    operating_system_info = {}

    if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
        LOGGER.debug("Nothing to submit")
        return

    if self._data.get(ATTR_UUID) is None:
        self._data[ATTR_UUID] = uuid.uuid4().hex
        await self._store.async_save(self._data)

    if self.supervisor:
        supervisor_info = oppio.get_supervisor_info(self.opp)
        operating_system_info = oppio.get_os_info(self.opp)

    system_info = await async_get_system_info(self.opp)
    integrations = []
    custom_integrations = []
    addons = []
    payload: dict = {
        ATTR_UUID: self.uuid,
        ATTR_VERSION: HA_VERSION,
        ATTR_INSTALLATION_TYPE: system_info[ATTR_INSTALLATION_TYPE],
    }

    if supervisor_info is not None:
        payload[ATTR_SUPERVISOR] = {
            ATTR_HEALTHY: supervisor_info[ATTR_HEALTHY],
            ATTR_SUPPORTED: supervisor_info[ATTR_SUPPORTED],
            ATTR_ARCH: supervisor_info[ATTR_ARCH],
        }

    if operating_system_info.get(ATTR_BOARD) is not None:
        payload[ATTR_OPERATING_SYSTEM] = {
            ATTR_BOARD: operating_system_info[ATTR_BOARD],
            ATTR_VERSION: operating_system_info[ATTR_VERSION],
        }

    if self.preferences.get(ATTR_USAGE, False) or self.preferences.get(
        ATTR_STATISTICS, False
    ):
        configured_integrations = await asyncio.gather(
            *[
                async_get_integration(self.opp, domain)
                for domain in async_get_loaded_integrations(self.opp)
            ],
            return_exceptions=True,
        )

        for integration in configured_integrations:
            if isinstance(integration, IntegrationNotFound):
                continue

            if isinstance(integration, BaseException):
                raise integration

            if integration.disabled:
                continue

            if not integration.is_built_in:
                custom_integrations.append(
                    {
                        ATTR_DOMAIN: integration.domain,
                        ATTR_VERSION: integration.version,
                    }
                )
                continue

            integrations.append(integration.domain)

        if supervisor_info is not None:
            installed_addons = await asyncio.gather(
                *[
                    oppio.async_get_addon_info(self.opp, addon[ATTR_SLUG])
                    for addon in supervisor_info[ATTR_ADDONS]
                ]
            )
            for addon in installed_addons:
                addons.append(
                    {
                        ATTR_SLUG: addon[ATTR_SLUG],
                        ATTR_PROTECTED: addon[ATTR_PROTECTED],
                        ATTR_VERSION: addon[ATTR_VERSION],
                        ATTR_AUTO_UPDATE: addon[ATTR_AUTO_UPDATE],
                    }
                )

    if self.preferences.get(ATTR_USAGE, False):
        payload[ATTR_INTEGRATIONS] = integrations
        payload[ATTR_CUSTOM_INTEGRATIONS] = custom_integrations
        if supervisor_info is not None:
            payload[ATTR_ADDONS] = addons

    if self.preferences.get(ATTR_STATISTICS, False):
        payload[ATTR_STATE_COUNT] = len(self.opp.states.async_all())
        payload[ATTR_AUTOMATION_COUNT] = len(
            self.opp.states.async_all(AUTOMATION_DOMAIN)
        )
        payload[ATTR_INTEGRATION_COUNT] = len(integrations)
        if supervisor_info is not None:
            payload[ATTR_ADDON_COUNT] = len(addons)
        payload[ATTR_USER_COUNT] = len(
            [
                user
                for user in await self.opp.auth.async_get_users()
                if not user.system_generated
            ]
        )

    try:
        with async_timeout.timeout(30):
            response = await self.session.post(self.endpoint, json=payload)
            if response.status == 200:
                LOGGER.info(
                    (
                        "Submitted analytics to Open Peer Power servers. "
                        "Information submitted includes %s"
                    ),
                    payload,
                )
            else:
                LOGGER.warning(
                    "Sending analytics failed with statuscode %s from %s",
                    response.status,
                    self.endpoint,
                )
    except asyncio.TimeoutError:
        LOGGER.error("Timeout sending analytics to %s", ANALYTICS_ENDPOINT_URL)
    except aiohttp.ClientError as err:
        LOGGER.error(
            "Error sending analytics to %s: %r", ANALYTICS_ENDPOINT_URL, err
        )
async def test_integrations_only_once(opp):
    """Test that we load integrations only once."""
    int_1 = opp.async_create_task(loader.async_get_integration(opp, "hue"))
    int_2 = opp.async_create_task(loader.async_get_integration(opp, "hue"))

    assert await int_1 is await int_2
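
# Minimal sketch of the caching pattern this test relies on (the real loader
# differs in detail): concurrent lookups for the same domain share a single
# in-flight future, so both awaits above resolve to the identical object.
import asyncio

_integration_cache: dict = {}


async def get_cached(domain: str, resolver):
    """Resolve `domain` at most once; concurrent callers share the result."""
    if (future := _integration_cache.get(domain)) is None:
        future = _integration_cache[domain] = asyncio.get_running_loop().create_future()
        try:
            future.set_result(await resolver(domain))
        except Exception as err:  # don't cache failures; let all waiters see the error
            _integration_cache.pop(domain, None)
            future.set_exception(err)
    return await future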
async def _async_set_up_integrations(
    opp: core.OpenPeerPower, config: dict[str, Any]
) -> None:
    """Set up all the integrations."""
    opp.data[DATA_SETUP_STARTED] = {}
    setup_time = opp.data[DATA_SETUP_TIME] = {}

    watch_task = asyncio.create_task(_async_watch_pending_setups(opp))

    domains_to_setup = _get_domains(opp, config)

    # Resolve all dependencies so we know all integrations
    # that will have to be loaded and start right away
    integration_cache: dict[str, loader.Integration] = {}
    to_resolve = domains_to_setup
    while to_resolve:
        old_to_resolve = to_resolve
        to_resolve = set()

        integrations_to_process = [
            int_or_exc
            for int_or_exc in await gather_with_concurrency(
                loader.MAX_LOAD_CONCURRENTLY,
                *(loader.async_get_integration(opp, domain) for domain in old_to_resolve),
                return_exceptions=True,
            )
            if isinstance(int_or_exc, loader.Integration)
        ]
        resolve_dependencies_tasks = [
            itg.resolve_dependencies()
            for itg in integrations_to_process
            if not itg.all_dependencies_resolved
        ]

        if resolve_dependencies_tasks:
            await asyncio.gather(*resolve_dependencies_tasks)

        for itg in integrations_to_process:
            integration_cache[itg.domain] = itg

            for dep in itg.all_dependencies:
                if dep in domains_to_setup:
                    continue

                domains_to_setup.add(dep)
                to_resolve.add(dep)

    _LOGGER.info("Domains to be set up: %s", domains_to_setup)

    logging_domains = domains_to_setup & LOGGING_INTEGRATIONS

    # Load logging as soon as possible
    if logging_domains:
        _LOGGER.info("Setting up logging: %s", logging_domains)
        await async_setup_multi_components(opp, logging_domains, config)

    # Start up debuggers. Start these first in case they want to wait.
    debuggers = domains_to_setup & DEBUGGER_INTEGRATIONS

    if debuggers:
        _LOGGER.debug("Setting up debuggers: %s", debuggers)
        await async_setup_multi_components(opp, debuggers, config)

    # calculate what components to setup in what stage
    stage_1_domains = set()

    # Find all dependencies of any dependency of any stage 1 integration that
    # we plan on loading and promote them to stage 1
    deps_promotion = STAGE_1_INTEGRATIONS
    while deps_promotion:
        old_deps_promotion = deps_promotion
        deps_promotion = set()

        for domain in old_deps_promotion:
            if domain not in domains_to_setup or domain in stage_1_domains:
                continue

            stage_1_domains.add(domain)

            dep_itg = integration_cache.get(domain)

            if dep_itg is None:
                continue

            deps_promotion.update(dep_itg.all_dependencies)

    stage_2_domains = domains_to_setup - logging_domains - debuggers - stage_1_domains

    # Load the registries
    await asyncio.gather(
        device_registry.async_load(opp),
        entity_registry.async_load(opp),
        area_registry.async_load(opp),
    )

    # Start setup
    if stage_1_domains:
        _LOGGER.info("Setting up stage 1: %s", stage_1_domains)
        try:
            async with opp.timeout.async_timeout(
                STAGE_1_TIMEOUT, cool_down=COOLDOWN_TIME
            ):
                await async_setup_multi_components(opp, stage_1_domains, config)
        except asyncio.TimeoutError:
            _LOGGER.warning("Setup timed out for stage 1 - moving forward")

    # Enables after dependencies
    async_set_domains_to_be_loaded(opp, stage_2_domains)

    if stage_2_domains:
        _LOGGER.info("Setting up stage 2: %s", stage_2_domains)
        try:
            async with opp.timeout.async_timeout(
                STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME
            ):
                await async_setup_multi_components(opp, stage_2_domains, config)
        except asyncio.TimeoutError:
            _LOGGER.warning("Setup timed out for stage 2 - moving forward")

    watch_task.cancel()
    async_dispatcher_send(opp, SIGNAL_BOOTSTRAP_INTEGRATONS, {})

    _LOGGER.debug(
        "Integration setup times: %s",
        {
            integration: timedelta.total_seconds()
            for integration, timedelta in sorted(
                setup_time.items(), key=lambda item: item[1].total_seconds()  # type: ignore
            )
        },
    )

    # Wrap up startup
    _LOGGER.debug("Waiting for startup to wrap up")
    try:
        async with opp.timeout.async_timeout(WRAP_UP_TIMEOUT, cool_down=COOLDOWN_TIME):
            await opp.async_block_till_done()
    except asyncio.TimeoutError:
        _LOGGER.warning("Setup timed out for bootstrap - moving forward")
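
# Minimal sketch (assumption) of the async_setup_multi_components helper used
# above: start async_setup_component for every domain concurrently and log any
# failures instead of aborting the rest of bootstrap.
async def async_setup_multi_components_sketch(opp, domains: set, config: dict) -> None:
    """Set up several domains concurrently, logging any that fail."""
    if not domains:
        return
    futures = {
        domain: opp.async_create_task(async_setup_component(opp, domain, config))
        for domain in domains
    }
    await asyncio.wait(futures.values())
    for domain, future in futures.items():
        if (exception := future.exception()) is not None:
            _LOGGER.error("Error setting up integration %s", domain, exc_info=exception)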
async def async_get_all_descriptions(
    opp: OpenPeerPower,
) -> dict[str, dict[str, Any]]:
    """Return descriptions (i.e. user documentation) for all service calls."""
    descriptions_cache = opp.data.setdefault(SERVICE_DESCRIPTION_CACHE, {})
    format_cache_key = "{}.{}".format
    services = opp.services.async_services()

    # See if there are new services not seen before.
    # Any service that we saw before already has an entry in description_cache.
    missing = set()
    for domain in services:
        for service in services[domain]:
            if format_cache_key(domain, service) not in descriptions_cache:
                missing.add(domain)
                break

    # Files we loaded for missing descriptions
    loaded = {}

    if missing:
        integrations = await gather_with_concurrency(
            MAX_LOAD_CONCURRENTLY,
            *(async_get_integration(opp, domain) for domain in missing),
        )

        contents = await opp.async_add_executor_job(
            _load_services_files, opp, integrations
        )

        for domain, content in zip(missing, contents):
            loaded[domain] = content

    # Build response
    descriptions: dict[str, dict[str, Any]] = {}
    for domain in services:
        descriptions[domain] = {}

        for service in services[domain]:
            cache_key = format_cache_key(domain, service)
            description = descriptions_cache.get(cache_key)

            # Cache missing descriptions
            if description is None:
                domain_yaml = loaded[domain]
                yaml_description = domain_yaml.get(service, {})  # type: ignore

                # Don't warn for missing services, because it triggers false
                # positives for things like scripts, that register as a service
                description = {
                    "name": yaml_description.get("name", ""),
                    "description": yaml_description.get("description", ""),
                    "fields": yaml_description.get("fields", {}),
                }

                if "target" in yaml_description:
                    description["target"] = yaml_description["target"]

                descriptions_cache[cache_key] = description

            descriptions[domain][service] = description

    return descriptions
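
# Minimal sketch (assumption) of the _load_services_files executor job used
# above: parse each integration's services.yaml in an executor thread and
# return the parsed mappings in the same order as `integrations`. The real
# helper may handle additional error cases.
from openpeerpower.util.yaml import load_yaml


def _load_services_files_sketch(opp, integrations) -> list:
    """Load and parse services.yaml for each integration, {} when missing."""
    loaded = []
    for integration in integrations:
        try:
            loaded.append(load_yaml(str(integration.file_path / "services.yaml")))
        except FileNotFoundError:
            loaded.append({})
    return loaded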