async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Load the saved entities.

    Backfills missing/legacy keys in the stored config entry data, then
    creates the mail data coordinator and forwards setup to all platforms.
    Always returns True (setup is never rejected here).
    """
    _LOGGER.info(
        "Version %s is starting, if you have any issues please report"
        " them here: %s",
        VERSION,
        ISSUE_URL,
    )
    hass.data.setdefault(DOMAIN, {})
    # Work on a mutable copy; config_entry.data itself is immutable.
    updated_config = config_entry.data.copy()
    # Set amazon fwd blank if missing
    if CONF_AMAZON_FWDS not in updated_config.keys():
        updated_config[CONF_AMAZON_FWDS] = []
    # Set default timeout if missing
    if CONF_IMAP_TIMEOUT not in updated_config.keys():
        updated_config[CONF_IMAP_TIMEOUT] = DEFAULT_IMAP_TIMEOUT
    # Set external path off by default (also resets the image path when the
    # external-path option was never stored).
    if CONF_ALLOW_EXTERNAL not in config_entry.data.keys():
        updated_config[CONF_ALLOW_EXTERNAL] = False
        updated_config[CONF_PATH] = default_image_path(hass, config_entry)
    # Set image security always on
    if CONF_IMAGE_SECURITY not in config_entry.data.keys():
        updated_config[CONF_IMAGE_SECURITY] = True
    # Sort the resources so the stored order is deterministic.
    updated_config[CONF_RESOURCES] = sorted(updated_config[CONF_RESOURCES])
    # Only persist if something actually changed.
    if updated_config != config_entry.data:
        hass.config_entries.async_update_entry(config_entry, data=updated_config)
    config_entry.add_update_listener(update_listener)
    # NOTE(review): assigning config_entry.options directly mirrors data into
    # options; presumably kept for backward compatibility — confirm against
    # the options flow before removing.
    config_entry.options = config_entry.data
    config = config_entry.data
    # Variables for data coordinator
    host = config.get(CONF_HOST)
    timeout = config.get(CONF_IMAP_TIMEOUT)
    interval = config.get(CONF_SCAN_INTERVAL)
    # Setup the data coordinator
    coordinator = MailDataUpdateCoordinator(hass, host, timeout, interval, config)
    # Fetch initial data so we have data when entities subscribe
    await coordinator.async_refresh()
    hass.data[DOMAIN][config_entry.entry_id] = {
        COORDINATOR: coordinator,
    }
    # Forward the entry to every supported platform.
    for platform in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(config_entry, platform)
        )
    return True
async def async_migrate_entry(hass, config_entry: ConfigEntry):
    """Migrate old entry.

    Version 1 -> 2: moves the poll interval into the nested options layout
    and removes the flattened keys from entry data.  Returns True on
    success so Home Assistant marks the migration as complete.
    """
    _LOGGER.debug("Migrating from version %s", config_entry.version)
    # Flatten configuration but keep old data if user rollbacks HASS
    if config_entry.version == 1:
        options = {**config_entry.options}
        options.setdefault(
            OPTIONS_GENERAL,
            {
                OPTIONS_GENERAL_POLL_INTERVAL: config_entry.data.get(
                    CONF_POLL_INTERVAL, DEFAULT_POLL_INTERVAL
                )
            },
        )
        config_entry.options = {**options}
        new = {**config_entry.data}
        # pop() with a default removes each key independently; the previous
        # single try/except with a bare `except:` skipped the second pop when
        # the first key was absent (and swallowed unrelated exceptions).
        new.pop(CONF_POLL_INTERVAL, None)
        # get rid of erroneously migrated options from integration 1.0
        new.pop(CONF_OPTIONS, None)
        config_entry.data = {**new}
        config_entry.version = 2
    _LOGGER.info("Migration to version %s successful", config_entry.version)
    return True
async def load_entry(self, entry: ConfigEntry) -> None:
    """Add a new entry.

    Registers the entry's bookkeeping dict, records lock device info,
    picks a fallback notifier, adds watched event entities, and forwards
    the entry to every platform.
    """
    entry.options = entry.data  # Sync data/options
    # Resolve the device registry info for the configured lock entity.
    _device = await self._get_device(entry.data[ATTR_ENTITY_ID])
    self._entries[entry.entry_id] = {
        ENTRY: entry,
        # Keep the unsubscribe handle so the listener can be removed later.
        UPDATE_LISTENER: entry.add_update_listener(update_listener),
        SENSORS: {},
        LOCK_INFO: {
            LOCK_MANUFACTURER: _device.manufacturer,
            LOCK_MODEL: _device.model,
        }
    }
    # Picking one of the entries notifiers as a fallback notifier
    # (first entry with a non-empty CONF_NOTIFY wins).
    if not self._default_notifier and CONF_NOTIFY in entry.data and entry.data[
            CONF_NOTIFY]:
        self._default_notifier = entry.data[CONF_NOTIFY]
    # Adding events we want to watch to the watch list, keyed by entity id.
    for d in DEVICES_WITH_EVENTS:
        if entry.data[d]:
            self._event_watch_list[entry.data[d]] = {
                ATTR_ENTITY_ID: entry.data[d],
                ENTRY_TYPE: d,
                ENTRY_ID: entry.entry_id
            }
    # Forward the entry to each supported platform.
    for component in PLATFORMS:
        self._hass.async_create_task(
            self._hass.config_entries.async_forward_entry_setup(
                entry, component))
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Set up this integration using UI.

    Logs the configured frequency, clears legacy options, registers the
    update listener, and forwards the entry to the sensor platform.
    """
    _LOGGER.debug(
        "Setting %s (%s) from ConfigFlow",
        config_entry.title,
        config_entry.data[const.CONF_FREQUENCY],
    )
    # Backward compatibility - clean-up (can be removed later?)
    config_entry.options = {}
    config_entry.add_update_listener(update_listener)
    # Hand the entry over to the sensor platform.
    forward_setup = hass.config_entries.async_forward_entry_setup(
        config_entry, const.SENSOR_PLATFORM
    )
    hass.async_create_task(forward_setup)
    return True
async def test_user_owserver_options_set_single(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    owproxy: MagicMock,
):
    """Test configuring a single device.

    Walks the options flow end-to-end: device-selection form, per-device
    configuration form, and final entry creation with default settings.
    """
    # Only the 28-family (temperature) devices are mocked on the proxy.
    setup_owproxy_mock_devices(
        owproxy, Platform.SENSOR, [x for x in MOCK_OWPROXY_DEVICES if "28." in x]
    )
    # Clear config options to certify functionality when starting from scratch
    config_entry.options = {}
    # Verify that first config step comes back with a selection list of all the 28-family devices
    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()
    result = await hass.config_entries.options.async_init(config_entry.entry_id)
    assert result["data_schema"].schema["device_selection"].options == {
        "28.111111111111": False,
        "28.222222222222": False,
        "28.222222222223": False,
    }
    # Verify that a single selected device to configure comes back as a form with the device to configure
    result = await hass.config_entries.options.async_configure(
        result["flow_id"],
        user_input={INPUT_ENTRY_DEVICE_SELECTION: ["28.111111111111"]},
    )
    assert result["type"] == RESULT_TYPE_FORM
    assert result["description_placeholders"]["sensor_id"] == "28.111111111111"
    # Verify that the setting for the device comes back as default when no input is given
    result = await hass.config_entries.options.async_configure(
        result["flow_id"],
        user_input={},
    )
    assert result["type"] == RESULT_TYPE_CREATE_ENTRY
    assert (
        result["data"]["device_options"]["28.111111111111"]["precision"]
        == "temperature"
    )
async def async_migrate_entry(hass, config_entry: ConfigEntry):
    """Migrate old entry.

    Version 1 -> 2: drop the obsolete monitored-condition option keys.
    """
    _LOGGER.info("Migrating from version %s", config_entry.version)
    if config_entry.version == 1:
        # modify Config Entry data: remove legacy keys if present.
        new_options = {**config_entry.options}
        for stale_key in ("monitored_conditions", "monitored_eq_conditions"):
            new_options.pop(stale_key, None)
        config_entry.options = {**new_options}
        config_entry.version = 2
    _LOGGER.info("Migration to version %s successful", config_entry.version)
    return True
async def platform_async_setup_entry(
    hass: HomeAssistantType,
    config_entry: ConfigEntry,
    async_add_entities,
    *,
    component_key: str,
    info_type,
    entity_type,
    state_type,
) -> bool:
    """Set up this integration using UI.

    YAML-imported entries are removed instead of being set up; UI entries
    get their options synced from data, an update listener, and a forward
    to the platform.
    """
    # We get here if the integration is set up using YAML
    if config_entry.source == config_entries.SOURCE_IMPORT:
        removal = hass.config_entries.async_remove(config_entry.entry_id)
        hass.async_create_task(removal)
        return False
    # Print startup message
    config_entry.options = config_entry.data
    config_entry.add_update_listener(update_listener)
    # Add sensor
    forwarded = hass.config_entries.async_forward_entry_setup(
        config_entry, PLATFORM
    )
    return await forwarded
async def async_migrate_entry(hass, config_entry: ConfigEntry):
    """Migrate old entry.

    Version 1 -> 2: split the flat data dict into connection data
    (host/port) and everything else as options.
    """
    LOGGER.debug("Migrating from version %s", config_entry.version)
    if config_entry.version == 1:
        # move from everything in data to splitting data and options
        legacy = config_entry.data
        connection_keys = (CONF_HOST, CONF_PORT)
        config_entry.data = MappingProxyType({
            CONF_HOST: legacy[CONF_HOST],
            CONF_PORT: legacy.get(CONF_PORT, DEFAULT_PORT),
        })
        # Everything that is not connection data becomes an option.
        remaining = {
            key: value
            for key, value in legacy.items()
            if key not in connection_keys
        }
        config_entry.options = MappingProxyType(remaining)
        config_entry.version = 2
    LOGGER.info("Migration to version %s successful", config_entry.version)
    return True
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Load the saved entities.

    Backfills missing/legacy config keys, builds a DataUpdateCoordinator
    that polls the mailbox via an executor job, and forwards the entry to
    the platform.  Always returns True.
    """
    _LOGGER.info(
        "Version %s is starting, if you have any issues please report"
        " them here: %s",
        VERSION,
        ISSUE_URL,
    )
    hass.data.setdefault(DOMAIN, {})
    # Work on a mutable copy; config_entry.data itself is immutable.
    updated_config = config_entry.data.copy()
    # Set amazon fwd blank if missing
    if CONF_AMAZON_FWDS not in updated_config.keys():
        updated_config[CONF_AMAZON_FWDS] = []
    # Set default timeout if missing
    if CONF_IMAP_TIMEOUT not in updated_config.keys():
        updated_config[CONF_IMAP_TIMEOUT] = DEFAULT_IMAP_TIMEOUT
    # Set external path off by default
    if CONF_ALLOW_EXTERNAL not in config_entry.data.keys():
        updated_config[CONF_ALLOW_EXTERNAL] = False
        updated_config[CONF_PATH] = default_image_path(hass, config_entry)
    # Set image security always on
    if CONF_IMAGE_SECURITY not in config_entry.data.keys():
        updated_config[CONF_IMAGE_SECURITY] = True
    # Sort the resources so stored order is deterministic.
    updated_config[CONF_RESOURCES] = sorted(updated_config[CONF_RESOURCES])
    # Only persist if something actually changed.
    if updated_config != config_entry.data:
        hass.config_entries.async_update_entry(config_entry, data=updated_config)
    config_entry.add_update_listener(update_listener)
    # NOTE(review): mirrors data into options; presumably for backward
    # compatibility with the options flow — confirm before removing.
    config_entry.options = config_entry.data
    config = config_entry.data

    async def async_update_data():
        """Fetch mail data, bounded by the configured IMAP timeout."""
        # process_emails is blocking, so run it in the executor.
        async with async_timeout.timeout(config.get(CONF_IMAP_TIMEOUT)):
            return await hass.async_add_executor_job(process_emails, hass, config)

    coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        name=f"Mail and Packages ({config.get(CONF_HOST)})",
        update_method=async_update_data,
        update_interval=timedelta(
            minutes=config_entry.data.get(CONF_SCAN_INTERVAL)),
    )
    # Fetch initial data so we have data when entities subscribe
    await coordinator.async_refresh()
    hass.data[DOMAIN][config_entry.entry_id] = {
        COORDINATOR: coordinator,
    }
    # Best-effort forward: a ValueError here is deliberately ignored
    # (presumably raised when the platform is already set up — confirm).
    try:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(
                config_entry, PLATFORM))
    except ValueError:
        pass
    return True
async def async_migrate_entry(hass, entry: ConfigEntry):
    """Migrate to latest config format.

    Applies migrations sequentially (1->2->3->4); each block runs only if
    the entry is at exactly that version, so a partially-migrated entry
    resumes where it left off.  Returns False when the device type cannot
    be inferred, aborting the migration.
    """
    # Legacy keys used by older entry versions.
    CONF_TYPE_AUTO = "auto"
    CONF_DISPLAY_LIGHT = "display_light"
    CONF_CHILD_LOCK = "child_lock"
    if entry.version == 1:
        # Removal of Auto detection.
        config = {**entry.data, **entry.options, "name": entry.title}
        opts = {**entry.options}
        if config[CONF_TYPE] == CONF_TYPE_AUTO:
            # Probe the device to replace "auto" with a concrete type.
            device = setup_device(hass, config)
            config[CONF_TYPE] = await device.async_inferred_type()
            if config[CONF_TYPE] is None:
                return False
        entry.data = {
            CONF_DEVICE_ID: config[CONF_DEVICE_ID],
            CONF_LOCAL_KEY: config[CONF_LOCAL_KEY],
            CONF_HOST: config[CONF_HOST],
        }
        # Rename legacy option keys to their current names.
        if CONF_CHILD_LOCK in config:
            opts.pop(CONF_CHILD_LOCK, False)
            opts[CONF_LOCK] = config[CONF_CHILD_LOCK]
        if CONF_DISPLAY_LIGHT in config:
            opts.pop(CONF_DISPLAY_LIGHT, False)
            opts[CONF_LIGHT] = config[CONF_DISPLAY_LIGHT]
        entry.options = {**opts}
        entry.version = 2
    if entry.version == 2:
        # CONF_TYPE is not configurable, move it from options to the main config.
        config = {**entry.data, **entry.options, "name": entry.title}
        opts = {**entry.options}
        # Ensure type has been migrated. Some users are reporting errors which
        # suggest it was removed completely. But that is probably due to
        # overwriting options without CONF_TYPE.
        if config.get(CONF_TYPE, CONF_TYPE_AUTO) == CONF_TYPE_AUTO:
            device = setup_device(hass, config)
            config[CONF_TYPE] = await device.async_inferred_type()
            if config[CONF_TYPE] is None:
                return False
        entry.data = {
            CONF_DEVICE_ID: config[CONF_DEVICE_ID],
            CONF_LOCAL_KEY: config[CONF_LOCAL_KEY],
            CONF_HOST: config[CONF_HOST],
            CONF_TYPE: config[CONF_TYPE],
        }
        opts.pop(CONF_TYPE, None)
        entry.options = {**opts}
        entry.version = 3
    if entry.version == 3:
        # Migrate to filename based config_type, to avoid needing to
        # parse config files to find the right one.
        config = {**entry.data, **entry.options, "name": entry.title}
        config_type = get_config(config[CONF_TYPE]).config_type
        # Special case for kogan_switch. Consider also v2.
        if config_type == "smartplugv1":
            device = setup_device(hass, config)
            config_type = await device.async_inferred_type()
            if config_type != "smartplugv2":
                config_type = "smartplugv1"
        entry.data = {
            CONF_DEVICE_ID: config[CONF_DEVICE_ID],
            CONF_LOCAL_KEY: config[CONF_LOCAL_KEY],
            CONF_HOST: config[CONF_HOST],
            CONF_TYPE: config_type,
        }
        entry.version = 4
    return True
async def async_migrate_entry(_, config_entry: ConfigEntry) -> bool:
    """Migrate old entry.

    Removes unsupported holiday/offset options (version 1), converts
    week-order-number config to the weekday-order-number form, and
    stringifies weekday order numbers (versions <= 4).  Removed settings
    are reported via error logs so the user can reconfigure them.
    """
    _LOGGER.info("Migrating %s from version %s", config_entry.title,
                 config_entry.version)
    # Mutable copies of the stored data/options.
    new_data = {**config_entry.data}
    new_options = {**config_entry.options}
    # Settings that are dropped entirely, kept only for the error report.
    removed_data: Dict[str, Any] = {}
    removed_options: Dict[str, Any] = {}
    _LOGGER.debug("new_data %s", new_data)
    _LOGGER.debug("new_options %s", new_options)
    if config_entry.version == 1:
        # These options are no longer supported; remove from both layers.
        to_remove = [
            "offset",
            "move_country_holidays",
            "holiday_in_week_move",
            "holiday_pop_named",
            "holiday_move_offset",
            "prov",
            "state",
            "observed",
            "exclude_dates",
            "include_dates",
        ]
        for remove in to_remove:
            if remove in new_data:
                removed_data[remove] = new_data[remove]
                del new_data[remove]
            if remove in new_options:
                removed_options[remove] = new_options[remove]
                del new_options[remove]
        # Monthly schedules: replace week_order_number with
        # weekday_order_number + force_week_numbers flag.
        if new_data.get(const.CONF_FREQUENCY) in const.MONTHLY_FREQUENCY:
            if const.CONF_WEEK_ORDER_NUMBER in new_data:
                new_data[const.CONF_WEEKDAY_ORDER_NUMBER] = new_data[
                    const.CONF_WEEK_ORDER_NUMBER]
                new_data[const.CONF_FORCE_WEEK_NUMBERS] = True
                del new_data[const.CONF_WEEK_ORDER_NUMBER]
            else:
                new_data[const.CONF_FORCE_WEEK_NUMBERS] = False
            _LOGGER.info("Updated data config for week_order_number")
        # Same conversion for the options layer.
        if new_options.get(const.CONF_FREQUENCY) in const.MONTHLY_FREQUENCY:
            if const.CONF_WEEK_ORDER_NUMBER in new_options:
                new_options[const.CONF_WEEKDAY_ORDER_NUMBER] = new_options[
                    const.CONF_WEEK_ORDER_NUMBER]
                new_options[const.CONF_FORCE_WEEK_NUMBERS] = True
                del new_options[const.CONF_WEEK_ORDER_NUMBER]
                _LOGGER.info("Updated options config for week_order_number")
            else:
                new_options[const.CONF_FORCE_WEEK_NUMBERS] = False
    if config_entry.version <= 4:
        # Weekday order numbers are stored as strings from version 5 on.
        if const.CONF_WEEKDAY_ORDER_NUMBER in new_data:
            new_data[const.CONF_WEEKDAY_ORDER_NUMBER] = list(
                map(str, new_data[const.CONF_WEEKDAY_ORDER_NUMBER]))
        if const.CONF_WEEKDAY_ORDER_NUMBER in new_options:
            new_options[const.CONF_WEEKDAY_ORDER_NUMBER] = list(
                map(str, new_options[const.CONF_WEEKDAY_ORDER_NUMBER]))
    # Persist the migrated entry at the current version.
    config_entry.version = const.VERSION
    config_entry.data = {**new_data}
    config_entry.options = {**new_options}
    if removed_data:
        _LOGGER.error(
            "Removed data config %s. "
            "Please check the documentation how to configure the functionality.",
            removed_data,
        )
    if removed_options:
        _LOGGER.error(
            "Removed options config %s. "
            "Please check the documentation how to configure the functionality.",
            removed_options,
        )
    _LOGGER.info(
        "%s migration to version %s successful",
        config_entry.title,
        config_entry.version,
    )
    return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Set up the Ecowitt component from UI.

    Starts an EcoWittListener for the configured port and registers a
    callback that lazily creates sensor entities the first time data
    arrives (and for any new sensors seen later).
    """
    if hass.data.get(DOMAIN) is None:
        hass.data.setdefault(DOMAIN, {})
    # if options existed in the YAML but not in the config entry, add
    if (not entry.options and entry.source == SOURCE_IMPORT
            and hass.data.get(DOMAIN)
            and hass.data[DOMAIN].get(DATA_OPTIONS)):
        hass.config_entries.async_update_entry(
            entry=entry,
            options=hass.data[DOMAIN][DATA_OPTIONS],
        )
    # Store config
    hass.data[DOMAIN][entry.entry_id] = {}
    ecowitt_data = hass.data[DOMAIN][entry.entry_id]
    ecowitt_data[DATA_STATION] = {}
    ecowitt_data[DATA_READY] = False
    ecowitt_data[REG_ENTITIES] = {}
    # One registered-entity list per platform.
    for pl in ECOWITT_PLATFORMS:
        ecowitt_data[REG_ENTITIES][pl] = []
    # Default unit options when none were configured.
    if not entry.options:
        entry.options = {
            CONF_UNIT_BARO: CONF_UNIT_SYSTEM_METRIC,
            CONF_UNIT_WIND: CONF_UNIT_SYSTEM_IMPERIAL,
            CONF_UNIT_RAIN: CONF_UNIT_SYSTEM_IMPERIAL,
            CONF_UNIT_LIGHTNING: CONF_UNIT_SYSTEM_IMPERIAL,
            CONF_UNIT_WINDCHILL: W_TYPE_HYBRID,
        }
    # preload some model info (filled in once the first packet arrives)
    stationinfo = ecowitt_data[DATA_STATION]
    stationinfo[DATA_STATIONTYPE] = "Unknown"
    stationinfo[DATA_FREQ] = "Unknown"
    stationinfo[DATA_MODEL] = "Unknown"
    # setup the base connection
    ws = EcoWittListener(port=entry.data[CONF_PORT])
    ecowitt_data[DATA_ECOWITT] = ws
    # Select the windchill algorithm from the configured option.
    if entry.options[CONF_UNIT_WINDCHILL] == W_TYPE_OLD:
        ws.set_windchill(WINDCHILL_OLD)
    if entry.options[CONF_UNIT_WINDCHILL] == W_TYPE_NEW:
        ws.set_windchill(WINDCHILL_NEW)
    if entry.options[CONF_UNIT_WINDCHILL] == W_TYPE_HYBRID:
        ws.set_windchill(WINDCHILL_HYBRID)
    hass.loop.create_task(ws.listen())

    async def close_server(*args):
        """Close the ecowitt server."""
        await ws.stop()

    def check_imp_metric_sensor(sensor):
        """Check if this is the wrong sensor for our config (imp/metric)."""
        # Is this a metric or imperial sensor, lookup and skip
        name, uom, kind, device_class, icon, metric = SENSOR_TYPES[sensor]
        # metric == 0: unit-agnostic sensor, always accepted.
        if metric == 0:
            return True
        if "baro" in sensor:
            if (entry.options[CONF_UNIT_BARO] == CONF_UNIT_SYSTEM_IMPERIAL
                    and metric == S_METRIC):
                return False
            if (entry.options[CONF_UNIT_BARO] == CONF_UNIT_SYSTEM_METRIC
                    and metric == S_IMPERIAL):
                return False
        if "rain" in sensor:
            if (entry.options[CONF_UNIT_RAIN] == CONF_UNIT_SYSTEM_IMPERIAL
                    and metric == S_METRIC):
                return False
            if (entry.options[CONF_UNIT_RAIN] == CONF_UNIT_SYSTEM_METRIC
                    and metric == S_IMPERIAL):
                return False
        # Wind sensors (but not windchill) match against three unit systems.
        if "windchill" not in sensor and ("wind" in sensor or "gust" in sensor):
            if (entry.options[CONF_UNIT_WIND] == CONF_UNIT_SYSTEM_IMPERIAL
                    and metric != S_IMPERIAL):
                return False
            if (entry.options[CONF_UNIT_WIND] == CONF_UNIT_SYSTEM_METRIC
                    and metric != S_METRIC):
                return False
            if (entry.options[CONF_UNIT_WIND] == CONF_UNIT_SYSTEM_METRIC_MS
                    and metric != S_METRIC_MS):
                return False
        if (sensor == 'lightning'
                and entry.options[CONF_UNIT_LIGHTNING] == CONF_UNIT_SYSTEM_IMPERIAL):
            return False
        if (sensor == 'lightning_mi'
                and entry.options[CONF_UNIT_LIGHTNING] == CONF_UNIT_SYSTEM_METRIC):
            return False
        return True

    def check_and_append_sensor(sensor):
        """Check the sensor for validity, and append to new entitiy list.

        Returns the platform kind on success, None when the sensor is
        unknown or filtered out by the unit-system check.
        """
        if sensor not in SENSOR_TYPES:
            if sensor not in IGNORED_SENSORS:
                _LOGGER.warning("Unhandled sensor type %s", sensor)
            return None
        # Is this a metric or imperial sensor, lookup and skip
        if not check_imp_metric_sensor(sensor):
            return None
        name, uom, kind, device_class, icon, metric = SENSOR_TYPES[sensor]
        ecowitt_data[REG_ENTITIES][kind].append(sensor)
        return kind

    async def _first_data_rec(weather_data):
        """Handle the very first data packet: record station info, register
        sensors, and forward platform setup."""
        _LOGGER.info("First ecowitt data recd, setting up sensors.")
        # check if we have model info, etc.
        if DATA_PASSKEY in ws.last_values:
            stationinfo[DATA_PASSKEY] = ws.last_values[DATA_PASSKEY]
            ws.last_values.pop(DATA_PASSKEY, None)
        else:
            _LOGGER.error("No passkey, cannot set unique id.")
            return False
        if DATA_STATIONTYPE in ws.last_values:
            stationinfo[DATA_STATIONTYPE] = ws.last_values[DATA_STATIONTYPE]
            ws.last_values.pop(DATA_STATIONTYPE, None)
        if DATA_FREQ in ws.last_values:
            stationinfo[DATA_FREQ] = ws.last_values[DATA_FREQ]
            ws.last_values.pop(DATA_FREQ, None)
        if DATA_MODEL in ws.last_values:
            stationinfo[DATA_MODEL] = ws.last_values[DATA_MODEL]
            ws.last_values.pop(DATA_MODEL, None)
        # load the sensors we have
        for sensor in ws.last_values.keys():
            check_and_append_sensor(sensor)
        if (not ecowitt_data[REG_ENTITIES][TYPE_SENSOR]
                and not ecowitt_data[REG_ENTITIES][TYPE_BINARY_SENSOR]):
            _LOGGER.error("No sensors found to monitor, check device config.")
            return False
        for component in ECOWITT_PLATFORMS:
            hass.async_create_task(
                hass.config_entries.async_forward_entry_setup(
                    entry, component))
        # Subsequent callbacks take the regular-update path.
        ecowitt_data[DATA_READY] = True

    async def _async_ecowitt_update_cb(weather_data):
        """Primary update callback called from pyecowitt."""
        _LOGGER.debug("Primary update callback triggered.")
        new_sensors = {}
        for component in ECOWITT_PLATFORMS:
            new_sensors[component] = []
        # First packet: do the one-time setup instead of a normal update.
        if not hass.data[DOMAIN][entry.entry_id][DATA_READY]:
            await _first_data_rec(weather_data)
            return
        for sensor in weather_data.keys():
            if sensor not in SENSOR_TYPES:
                if sensor not in IGNORED_SENSORS:
                    _LOGGER.warning(
                        "Unhandled sensor type %s value %s, " + "file a PR.",
                        sensor, weather_data[sensor])
            elif (sensor not in ecowitt_data[REG_ENTITIES][TYPE_SENSOR]
                  and sensor not in ecowitt_data[REG_ENTITIES][TYPE_BINARY_SENSOR]
                  and sensor not in IGNORED_SENSORS
                  and check_imp_metric_sensor(sensor)):
                _LOGGER.warning(
                    "Unregistered sensor type %s value %s received.",
                    sensor, weather_data[sensor])
                # try to register the sensor
                kind = check_and_append_sensor(sensor)
                if kind is not None:
                    new_sensors[kind].append(sensor)
        # if we have new sensors, set them up.
        for component in ECOWITT_PLATFORMS:
            if new_sensors[component]:
                signal = f"{SIGNAL_ADD_ENTITIES}_{component}"
                async_dispatcher_send(hass, signal, new_sensors[component])
        # Notify existing entities that fresh data is available.
        async_dispatcher_send(hass, DOMAIN)

    # this is part of the base async_setup_entry
    ws.register_listener(_async_ecowitt_update_cb)
    return True
async def async_migrate_entry(hass, entry: ConfigEntry):
    """Migrate to latest config format.

    Applies migrations sequentially (1->2->...->7); each block runs only
    if the entry is at exactly that version, so a partially-migrated
    entry resumes where it left off.  Returns False when type inference
    or config-file lookup fails, aborting the migration.
    """
    # Legacy keys used by older entry versions.
    CONF_TYPE_AUTO = "auto"
    CONF_DISPLAY_LIGHT = "display_light"
    CONF_CHILD_LOCK = "child_lock"
    if entry.version == 1:
        # Removal of Auto detection.
        config = {**entry.data, **entry.options, "name": entry.title}
        opts = {**entry.options}
        if config[CONF_TYPE] == CONF_TYPE_AUTO:
            # Probe the device to replace "auto" with a concrete type.
            device = setup_device(hass, config)
            config[CONF_TYPE] = await device.async_inferred_type()
            if config[CONF_TYPE] is None:
                _LOGGER.error(
                    f"Unable to determine type for device {config[CONF_DEVICE_ID]}."
                )
                return False
        entry.data = {
            CONF_DEVICE_ID: config[CONF_DEVICE_ID],
            CONF_LOCAL_KEY: config[CONF_LOCAL_KEY],
            CONF_HOST: config[CONF_HOST],
        }
        # Rename legacy option keys to their current names.
        if CONF_CHILD_LOCK in config:
            opts.pop(CONF_CHILD_LOCK, False)
            opts[CONF_LOCK] = config[CONF_CHILD_LOCK]
        if CONF_DISPLAY_LIGHT in config:
            opts.pop(CONF_DISPLAY_LIGHT, False)
            opts[CONF_LIGHT] = config[CONF_DISPLAY_LIGHT]
        entry.options = {**opts}
        entry.version = 2
    if entry.version == 2:
        # CONF_TYPE is not configurable, move it from options to the main config.
        config = {**entry.data, **entry.options, "name": entry.title}
        opts = {**entry.options}
        # Ensure type has been migrated. Some users are reporting errors which
        # suggest it was removed completely. But that is probably due to
        # overwriting options without CONF_TYPE.
        if config.get(CONF_TYPE, CONF_TYPE_AUTO) == CONF_TYPE_AUTO:
            device = setup_device(hass, config)
            config[CONF_TYPE] = await device.async_inferred_type()
            if config[CONF_TYPE] is None:
                _LOGGER.error(
                    f"Unable to determine type for device {config[CONF_DEVICE_ID]}."
                )
                return False
        entry.data = {
            CONF_DEVICE_ID: config[CONF_DEVICE_ID],
            CONF_LOCAL_KEY: config[CONF_LOCAL_KEY],
            CONF_HOST: config[CONF_HOST],
            CONF_TYPE: config[CONF_TYPE],
        }
        opts.pop(CONF_TYPE, None)
        entry.options = {**opts}
        entry.version = 3
    if entry.version == 3:
        # Migrate to filename based config_type, to avoid needing to
        # parse config files to find the right one.
        config = {**entry.data, **entry.options, "name": entry.title}
        config_type = get_config(config[CONF_TYPE]).config_type
        # Special case for kogan_switch. Consider also v2.
        if config_type == "smartplugv1":
            device = setup_device(hass, config)
            config_type = await device.async_inferred_type()
            if config_type != "smartplugv2":
                config_type = "smartplugv1"
        entry.data = {
            CONF_DEVICE_ID: config[CONF_DEVICE_ID],
            CONF_LOCAL_KEY: config[CONF_LOCAL_KEY],
            CONF_HOST: config[CONF_HOST],
            CONF_TYPE: config_type,
        }
        entry.version = 4
    if entry.version == 4:
        # Migrate indexes to entity id rather than type, to allow for multiple
        # entities of the same type for a device.
        config = {**entry.data, **entry.options, "name": entry.title}
        devcfg = get_config(config[CONF_TYPE])
        opts = {**entry.options}
        newopts = {**opts}
        entry.data = {
            CONF_DEVICE_ID: config[CONF_DEVICE_ID],
            CONF_LOCAL_KEY: config[CONF_LOCAL_KEY],
            CONF_HOST: config[CONF_HOST],
            CONF_TYPE: config[CONF_TYPE],
        }
        # Re-key the primary entity's option if its config id differs.
        e = devcfg.primary_entity
        if e.config_id != e.entity:
            newopts.pop(e.entity, None)
            newopts[e.config_id] = opts.get(e.entity, False)
        # Same re-keying for every secondary entity.
        for e in devcfg.secondary_entities():
            if e.config_id != e.entity:
                newopts.pop(e.entity, None)
                newopts[e.config_id] = opts.get(e.entity, False)
        entry.options = {**newopts}
        entry.version = 5
    if entry.version == 5:
        # Migrate unique ids of existing entities to new format
        old_id = entry.unique_id
        conf_file = get_config(entry.data[CONF_TYPE])
        if conf_file is None:
            _LOGGER.error(f"Configuration file for {entry.data[CONF_TYPE]} not found.")
            return False

        @callback
        def update_unique_id(entity_entry):
            """Update the unique id of an entity entry."""
            # Find the config entity matching this entity's platform:
            # check the primary first, then fall back to secondaries.
            e = conf_file.primary_entity
            if e.entity != entity_entry.platform:
                for e in conf_file.secondary_entities():
                    if e.entity == entity_entry.platform:
                        break
            if e.entity == entity_entry.platform:
                new_id = e.unique_id(old_id)
                if new_id != old_id:
                    _LOGGER.info(
                        f"Migrating {e.entity} unique_id {old_id} to {new_id}."
                    )
                    return {
                        "new_unique_id":
                            entity_entry.unique_id.replace(old_id, new_id)
                    }

        await async_migrate_entries(hass, entry.entry_id, update_unique_id)
        entry.version = 6
    if entry.version == 6:
        # Migrate some entity names to make them consistent for translations
        opts = {**entry.data, **entry.options}
        newopts = {**entry.options}
        master = opts.get("switch_main_switch")
        if master is not None:
            newopts.pop("switch_main_switch", None)
            newopts["switch_master"] = master
        # Either naming scheme may be present; prefer left/right outlet.
        outlet1 = opts.get("switch_left_outlet")
        outlet2 = opts.get("switch_right_outlet")
        outlet1 = opts.get("switch_wall_switch_1") if outlet1 is None else outlet1
        outlet2 = opts.get("switch_wall_switch_2") if outlet2 is None else outlet2
        if outlet1 is not None:
            newopts.pop("switch_left_outlet", None)
            newopts.pop("switch_wall_switch_1", None)
            newopts["switch_outlet_1"] = outlet1
        if outlet2 is not None:
            newopts.pop("switch_right_outlet", None)
            newopts.pop("switch_wall_switch_2", None)
            newopts["switch_outlet_2"] = outlet2
        entry.options = {**newopts}
        entry.version = 7
    return True