async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up mütesync from a config entry."""
    session = hass.helpers.aiohttp_client.async_get_clientsession()
    client = mutesync.PyMutesync(
        entry.data["token"],
        entry.data["host"],
        session,
    )

    async def _async_fetch_state():
        """Fetch the current state from the mütesync button."""
        async with async_timeout.timeout(2.5):
            return await client.get_state()

    coordinator = update_coordinator.DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        name=DOMAIN,
        update_interval=timedelta(seconds=5),
        update_method=_async_fetch_state,
    )
    # Cache the coordinator per config entry before the first refresh.
    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator

    await coordinator.async_config_entry_first_refresh()

    hass.config_entries.async_setup_platforms(entry, PLATFORMS)
    return True
async def get_coordinator(
    hass: HomeAssistant,
) -> update_coordinator.DataUpdateCoordinator:
    """Get the data update coordinator."""
    # Reuse the shared coordinator if it was already created.
    if DOMAIN in hass.data:
        return hass.data[DOMAIN]

    async def async_get_cases():
        """Download the case list and key it by country."""
        session = aiohttp_client.async_get_clientsession(hass)
        with async_timeout.timeout(10):
            return {
                case.country: case
                for case in await coronavirus.get_cases(session)
            }

    coordinator = update_coordinator.DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        name=DOMAIN,
        update_method=async_get_cases,
        update_interval=timedelta(hours=1),
    )
    hass.data[DOMAIN] = coordinator
    await coordinator.async_refresh()
    return coordinator
async def get_coordinator(hass, entry):
    """Get the data update coordinator for a PoolSense config entry.

    Returns a coordinator that polls the PoolSense cloud API hourly using
    the credentials stored on the config entry.
    """

    async def async_get_data():
        """Fetch the latest data from the PoolSense cloud API.

        Raises UpdateFailed when the PoolSense client reports an error.
        """
        _LOGGER.info("Run query to server")
        poolsense = PoolSense()
        with async_timeout.timeout(10):
            try:
                return await poolsense.get_poolsense_data(
                    aiohttp_client.async_get_clientsession(hass),
                    entry.data[CONF_EMAIL],
                    entry.data[CONF_PASSWORD],
                )
            except PoolSenseError as error:
                # Chain the original exception so the root cause is
                # preserved in the coordinator's log output.
                raise UpdateFailed(error) from error

    return update_coordinator.DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        name=DOMAIN,
        update_method=async_get_data,
        update_interval=timedelta(hours=1),
    )
async def get_coordinator(hass):
    """Get the data update coordinator.

    Creates the coordinator on first call and caches it in hass.data so
    later callers share the same instance.
    """
    if DOMAIN in hass.data:
        return hass.data[DOMAIN]

    async def async_get_cases():
        """Fetch current cases, keyed by country.

        Raises UpdateFailed on timeout or client errors.
        """
        try:
            with async_timeout.timeout(10):
                return {
                    case.country: case
                    for case in await coronavirus.get_cases(
                        aiohttp_client.async_get_clientsession(hass)
                    )
                }
        except (asyncio.TimeoutError, aiohttp.ClientError) as err:
            # Chain the cause so the underlying error is not lost.
            raise update_coordinator.UpdateFailed from err

    hass.data[DOMAIN] = update_coordinator.DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        name=DOMAIN,
        update_method=async_get_cases,
        update_interval=timedelta(hours=1),
    )
    await hass.data[DOMAIN].async_refresh()
    return hass.data[DOMAIN]
async def get_coordinator(hass: core.HomeAssistant, api: RKICovidAPI):
    """Get the data update coordinator.

    The coordinator is created once, cached in hass.data, and refreshes
    district data from the RKI API every three hours.
    """
    if DOMAIN in hass.data:
        return hass.data[DOMAIN]

    async def async_get_districts():
        """Fetch data from API endpoint.

        Here the data for each district is loaded.
        """
        try:
            with async_timeout.timeout(10):
                return {case.county: case for case in await api.load_districts()}
        except (asyncio.TimeoutError, aiohttp.ClientError) as err:
            # Chain the cause so the original network error stays visible.
            raise update_coordinator.UpdateFailed(
                f"Error communicating with API: {err}"
            ) from err

    hass.data[DOMAIN] = update_coordinator.DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        name=DOMAIN,
        update_method=async_get_districts,
        update_interval=timedelta(hours=3),
    )
    await hass.data[DOMAIN].async_refresh()
    return hass.data[DOMAIN]
async def async_setup_entry(hass: HomeAssistant, entry: dict):
    """Set up Mullvad VPN integration.

    Polls the Mullvad status API once a minute and forwards the entry to
    all supported platforms.
    """

    async def async_get_mullvad_api_data():
        """Build the blocking Mullvad client in an executor and return its data."""
        # async_timeout.timeout is an async context manager; it must be
        # entered with `async with` (plain `with` is rejected by
        # async_timeout >= 4 and is inconsistent with the other setup paths).
        async with async_timeout.timeout(10):
            api = await hass.async_add_executor_job(MullvadAPI)
            return api.data

    coordinator = update_coordinator.DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        name=DOMAIN,
        update_method=async_get_mullvad_api_data,
        update_interval=timedelta(minutes=1),
    )
    await coordinator.async_refresh()
    if not coordinator.last_update_success:
        # Abort setup; Home Assistant will retry the entry later.
        raise ConfigEntryNotReady

    hass.data[DOMAIN] = coordinator

    for platform in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, platform)
        )

    return True
async def get_coordinator(hass, config):
    """Get (or create) the data update coordinator for a line/station pair.

    Coordinators are cached in hass.data keyed by "<lineId>-<stationId>".
    """
    # Build the cache key once instead of re-formatting it at every use.
    key = f"{config.get('lineId')}-{config.get('stationId')}"
    try:
        return hass.data[DOMAIN][key]
    except KeyError:
        # Narrowed from a bare `except:`: only a missing cache entry should
        # fall through to creating a new coordinator.
        pass

    async def async_update_data():
        """Fetch upcoming trips for the configured line and station."""
        with async_timeout.timeout(10):
            return list(
                await hass.data[DOMAIN]["instance"].get_bustime(
                    config["lineId"], config["stationId"]
                )
            )

    coordinator = update_coordinator.DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        name=f"{DOMAIN}-{key}",
        update_method=async_update_data,
        update_interval=timedelta(seconds=10),
    )
    hass.data[DOMAIN][key] = coordinator
    await coordinator.async_refresh()
    return coordinator
def crd(hass):
    """Coordinator mock."""
    call_log = []

    async def refresh():
        """Record one refresh and return the running call count."""
        call_log.append(None)
        return len(call_log)

    return update_coordinator.DataUpdateCoordinator(
        hass,
        LOGGER,
        name="test",
        update_method=refresh,
        update_interval=timedelta(seconds=10),
    )
def crd(hass):
    """Coordinator mock."""
    call_count = 0

    async def refresh():
        """Bump and return the number of refreshes performed so far."""
        nonlocal call_count
        call_count += 1
        return call_count

    coordinator = update_coordinator.DataUpdateCoordinator(
        hass,
        LOGGER,
        name="test",
        update_method=refresh,
        update_interval=timedelta(seconds=10),
    )
    return coordinator
def get_crd(hass, update_interval):
    """Make coordinator mocks."""
    ticks = 0

    async def refresh():
        """Return how many times this coordinator has refreshed."""
        nonlocal ticks
        ticks += 1
        return ticks

    return update_coordinator.DataUpdateCoordinator(
        hass,
        LOGGER,
        name="test",
        update_method=refresh,
        update_interval=update_interval,
    )
async def get_coordinator(hass):
    """Get the data update coordinator."""
    # Return the cached instance when one already exists.
    if DOMAIN in hass.data:
        return hass.data[DOMAIN]

    async def async_get_cases():
        """Download the latest vaccination statistics."""
        session = aiohttp_client.async_get_clientsession(hass)
        with async_timeout.timeout(10):
            return await get_vaccination_stats(session)

    coordinator = update_coordinator.DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        name=DOMAIN,
        update_method=async_get_cases,
        update_interval=timedelta(hours=4),
    )
    hass.data[DOMAIN] = coordinator
    await coordinator.async_refresh()
    return coordinator
async def async_setup_entry(hass: HomeAssistant, entry: dict) -> bool:
    """Set up Mullvad VPN integration."""
    logger = logging.getLogger(__name__)

    async def _fetch_status():
        """Create the blocking Mullvad client off the event loop and return its data."""
        async with async_timeout.timeout(10):
            client = await hass.async_add_executor_job(MullvadAPI)
            return client.data

    coordinator = update_coordinator.DataUpdateCoordinator(
        hass,
        logger,
        name=DOMAIN,
        update_method=_fetch_status,
        update_interval=timedelta(minutes=1),
    )
    # Raises ConfigEntryNotReady internally if the first refresh fails.
    await coordinator.async_config_entry_first_refresh()

    hass.data[DOMAIN] = coordinator
    hass.config_entries.async_setup_platforms(entry, PLATFORMS)
    return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up mütesync from a config entry.

    Creates a data update coordinator whose polling interval adapts to
    whether the user is currently in a meeting.
    """
    client = mutesync.PyMutesync(
        entry.data["token"],
        entry.data["host"],
        hass.helpers.aiohttp_client.async_get_clientsession(),
    )

    async def update_data():
        """Update the data.

        Also adjusts the coordinator's polling interval based on meeting
        state. NOTE: this closes over `coordinator`, which is bound below
        after this function is defined — safe because the coordinator only
        calls update_data after the assignment has run.
        """
        async with async_timeout.timeout(2.5):
            state = await client.get_state()

        # A response missing either field is treated as invalid.
        if state["muted"] is None or state["in_meeting"] is None:
            raise update_coordinator.UpdateFailed("Got invalid response")

        # Poll faster while in a meeting, slower otherwise.
        if state["in_meeting"]:
            coordinator.update_interval = UPDATE_INTERVAL_IN_MEETING
        else:
            coordinator.update_interval = UPDATE_INTERVAL_NOT_IN_MEETING

        return state

    # Chained assignment: cache the coordinator per config entry and keep a
    # local reference for the first refresh below.
    coordinator = hass.data.setdefault(
        DOMAIN, {}
    )[entry.entry_id] = update_coordinator.DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        name=DOMAIN,
        update_interval=UPDATE_INTERVAL_NOT_IN_MEETING,
        update_method=update_data,
    )
    await coordinator.async_config_entry_first_refresh()

    hass.config_entries.async_setup_platforms(entry, PLATFORMS)
    return True
async def get_coordinator(hass, key, base):
    """Get the data update coordinator."""
    # One coordinator is cached per base currency.
    domain_data = hass.data.setdefault(DOMAIN, {})
    if base in domain_data:
        return domain_data[base]

    async def async_get_base():
        """Fetch the conversion-rate table for the base currency."""
        with async_timeout.timeout(10):
            session = aiohttp_client.async_get_clientsession(hass)
            response = await session.get(MAIN_URL.format(apikey=key, base=base))
            payload = await response.json()
            return payload["conversion_rates"]

    coordinator = update_coordinator.DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        name=DOMAIN,
        update_method=async_get_base,
        update_interval=timedelta(hours=24),
    )
    domain_data[base] = coordinator
    await coordinator.async_refresh()
    return coordinator
async def async_setup_entry(hass, entry, async_add_entities):
    """Set up the Tibber sensor.

    For each home: adds price/consumption sensors, subscribes to real-time
    data where available, and migrates old registry entries (keyed by the
    metering point EAN) to the new home-id based unique ids.
    """

    tibber_connection = hass.data.get(TIBBER_DOMAIN)

    entity_registry = async_get_entity_reg(hass)
    device_registry = async_get_dev_reg(hass)

    # One shared consumption coordinator is lazily created for all homes
    # that have a subscription but no real-time consumption feed.
    coordinator = None
    entities = []
    for home in tibber_connection.get_homes(only_active=False):
        try:
            await home.update_info()
        except asyncio.TimeoutError as err:
            _LOGGER.error("Timeout connecting to Tibber home: %s ", err)
            raise PlatformNotReady() from err
        except aiohttp.ClientError as err:
            _LOGGER.error("Error connecting to Tibber home: %s ", err)
            raise PlatformNotReady() from err

        if home.has_active_subscription:
            entities.append(TibberSensorElPrice(home))
        if home.has_real_time_consumption:
            # Real-time updates push new data straight into the entity set.
            await home.rt_subscribe(
                TibberRtDataCoordinator(
                    async_add_entities, home, hass
                ).async_set_updated_data
            )
        if home.has_active_subscription and not home.has_real_time_consumption:
            if coordinator is None:
                coordinator = update_coordinator.DataUpdateCoordinator(
                    hass,
                    _LOGGER,
                    name=f"Tibber {tibber_connection.name}",
                    update_method=tibber_connection.fetch_consumption_data_active_homes,
                    update_interval=timedelta(hours=1),
                )
            for entity_description in SENSORS:
                entities.append(
                    TibberDataSensor(home, coordinator, entity_description)
                )

        # migrate: old unique ids were the consumption EAN; skip homes
        # without one.
        old_id = home.info["viewer"]["home"]["meteringPointData"]["consumptionEan"]
        if old_id is None:
            continue

        # migrate entity registry entries to new home-id based unique ids
        old_entity_id = entity_registry.async_get_entity_id(
            "sensor", TIBBER_DOMAIN, old_id
        )
        if old_entity_id is not None:
            entity_registry.async_update_entity(
                old_entity_id, new_unique_id=home.home_id
            )

        # migrate device registry identifiers to new home-id based ids,
        # but only for devices owned by this config entry
        device_entry = device_registry.async_get_device({(TIBBER_DOMAIN, old_id)})
        if device_entry and entry.entry_id in device_entry.config_entries:
            device_registry.async_update_device(
                device_entry.id, new_identifiers={(TIBBER_DOMAIN, home.home_id)}
            )

    async_add_entities(entities, True)
async def async_setup(hass, config):
    """Set up the updater component.

    Creates a daily coordinator that compares the running version against
    the newest published release and exposes the result via a platform.
    """
    if "dev" in current_version:
        # This component only makes sense in release versions
        _LOGGER.info("Running on 'dev', only analytics will be submitted")

    conf = config.get(DOMAIN, {})
    # huuid is only sent along when the user opted into reporting.
    if conf.get(CONF_REPORTING):
        huuid = await hass.helpers.instance_id.async_get()
    else:
        huuid = None

    include_components = conf.get(CONF_COMPONENT_REPORTING)

    async def check_new_version():
        """Check if a new version is available and report if one is."""
        newest, release_notes = await get_newest_version(
            hass, huuid, include_components
        )

        _LOGGER.debug("Fetched version %s: %s", newest, release_notes)

        # Skip on dev
        if "dev" in current_version:
            return Updater(False, "", "")

        # Load data from supervisor on Hass.io — the supervisor's version
        # takes precedence over the fetched one.
        if hass.components.hassio.is_hassio():
            newest = hass.components.hassio.get_homeassistant_version()

        # Validate version
        update_available = False
        if StrictVersion(newest) > StrictVersion(current_version):
            _LOGGER.debug(
                "The latest available version of Home Assistant is %s", newest
            )
            update_available = True
        elif StrictVersion(newest) == StrictVersion(current_version):
            _LOGGER.debug(
                "You are on the latest version (%s) of Home Assistant", newest
            )
        elif StrictVersion(newest) < StrictVersion(current_version):
            _LOGGER.debug(
                "Local version is newer than the latest version (%s)", newest
            )

        _LOGGER.debug("Update available: %s", update_available)

        return Updater(update_available, newest, release_notes)

    coordinator = hass.data[DOMAIN] = update_coordinator.DataUpdateCoordinator(
        hass,
        _LOGGER,
        name="Home Assistant update",
        update_method=check_new_version,
        update_interval=timedelta(days=1),
    )

    # This can take up to 15s which can delay startup
    asyncio.create_task(coordinator.async_refresh())

    hass.async_create_task(
        discovery.async_load_platform(hass, "binary_sensor", DOMAIN, {}, config)
    )

    return True
async def get_coordinator(hass: core.HomeAssistant, parser: RkiCovidParser):
    """Get the data update coordinator.

    Creates (and caches in hass.data) a coordinator that refreshes
    district, state and country data from rki-covid-parser every 3 hours.
    """
    _LOGGER.debug("initialize the data coordinator.")
    if DOMAIN in hass.data:
        return hass.data[DOMAIN]

    def _to_district_data(name, county, state_name, source):
        """Map one parser record (district/state/country) onto DistrictData."""
        return DistrictData(
            name,
            county,
            state_name,
            source.population,
            source.cases,
            source.deaths,
            source.casesPerWeek,
            source.recovered,
            source.weekIncidence,
            source.casesPer100k,
            source.newCases,
            source.newDeaths,
            source.newRecovered,
            source.lastUpdate,
        )

    async def async_get_districts():
        """Fetch data from rki-covid-parser library.

        Here the data for each district is loaded.
        """
        _LOGGER.debug("fetch data from rki-covid-parser.")
        try:
            with async_timeout.timeout(30):
                await parser.load_data()
                _LOGGER.debug("fetching finished.")

                items = {}

                # districts, keyed by county name
                for district in parser.districts.values():
                    items[district.county] = _to_district_data(
                        district.name, district.county, district.state, district
                    )

                # states, prefixed with "BL " to distinguish from districts
                for state in parser.states.values():
                    name = "BL " + state.name
                    items[name] = _to_district_data(name, name, None, state)

                # country-wide aggregate
                items["Deutschland"] = _to_district_data(
                    "Deutschland", "Deutschland", None, parser.country
                )

                _LOGGER.debug("parsing data finished.")
                return items
        except asyncio.TimeoutError as err:
            # Chain the cause so the original error stays in the log.
            raise update_coordinator.UpdateFailed(
                f"Error reading data from rki-covid-parser timed-out: {err}"
            ) from err
        except aiohttp.ClientError as err:
            raise update_coordinator.UpdateFailed(
                f"Error reading data from rki-covid-parser by client: {err}"
            ) from err

    hass.data[DOMAIN] = update_coordinator.DataUpdateCoordinator(
        hass,
        logging.getLogger(__name__),
        name=DOMAIN,
        update_method=async_get_districts,
        update_interval=timedelta(hours=3),
    )
    await hass.data[DOMAIN].async_refresh()
    return hass.data[DOMAIN]