CONF_EXCLUDE_FEEDID = "exclude_feed_id"
CONF_ONLY_INCLUDE_FEEDID = "include_only_feed_id"
CONF_SENSOR_NAMES = "sensor_names"

DECIMALS = 2
DEFAULT_UNIT = POWER_WATT
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5)

ONLY_INCL_EXCL_NONE = "only_include_exclude_or_none"

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_API_KEY): cv.string,
        vol.Required(CONF_URL): cv.string,
        vol.Required(CONF_ID): cv.positive_int,
        vol.Exclusive(CONF_ONLY_INCLUDE_FEEDID, ONLY_INCL_EXCL_NONE): vol.All(
            cv.ensure_list, [cv.positive_int]
        ),
        vol.Exclusive(CONF_EXCLUDE_FEEDID, ONLY_INCL_EXCL_NONE): vol.All(
            cv.ensure_list, [cv.positive_int]
        ),
        vol.Optional(CONF_SENSOR_NAMES): vol.All(
            {cv.positive_int: vol.All(cv.string, vol.Length(min=1))}
        ),
        vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
        vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=DEFAULT_UNIT): cv.string,
    }
)


def get_id(sensorid, feedtag, feedname, feedid, feeduserid):
from homeassistant.components.http.ban import process_success_login, process_wrong_login
from homeassistant.const import __version__

from .connection import ActiveConnection
from .error import Disconnect

# mypy: allow-untyped-calls, allow-untyped-defs

TYPE_AUTH = "auth"
TYPE_AUTH_INVALID = "auth_invalid"
TYPE_AUTH_OK = "auth_ok"
TYPE_AUTH_REQUIRED = "auth_required"

AUTH_MESSAGE_SCHEMA = vol.Schema(
    {
        vol.Required("type"): TYPE_AUTH,
        vol.Exclusive("api_password", "auth"): str,
        vol.Exclusive("access_token", "auth"): str,
    }
)


def auth_ok_message():
    """Return an auth_ok message."""
    return {"type": TYPE_AUTH_OK, "ha_version": __version__}


def auth_required_message():
    """Return an auth_required message."""
    return {"type": TYPE_AUTH_REQUIRED, "ha_version": __version__}


def auth_invalid_message(message):
    'Stavanger',
    'Sør-Varanger',
    'Tromsø',
    'Trondheim',
    'Tustervatn',
    'Zeppelinfjellet',
    'Ålesund',
]

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Inclusive(CONF_LATITUDE, 'coordinates',
                  'Latitude and longitude must exist together'): cv.latitude,
    vol.Inclusive(CONF_LONGITUDE, 'coordinates',
                  'Latitude and longitude must exist together'): cv.longitude,
    vol.Exclusive(
        CONF_AREA, 'station_collection',
        'Can only configure one specific station or '
        'stations in a specific area pr sensor. '
        'Please only configure station or area.'):
            vol.All(cv.string, vol.In(CONF_ALLOWED_AREAS)),
    vol.Exclusive(
        CONF_STATION, 'station_collection',
        'Can only configure one specific station or '
        'stations in a specific area pr sensor. '
        'Please only configure station or area.'):
            vol.All(cv.ensure_list, [cv.string]),
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean,
})


def setup_platform(hass, config, add_entities, discovery_info=None):
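# The two vol.Inclusive markers above share the 'coordinates' group: a config
# may omit both latitude and longitude, but may not supply only one of them.
# A minimal standalone sketch of that behaviour in plain voluptuous follows;
# the keys and values here are illustrative, not part of the platform above.
import voluptuous as vol

COORDS = vol.Schema({
    vol.Inclusive('latitude', 'coordinates',
                  'Latitude and longitude must exist together'): float,
    vol.Inclusive('longitude', 'coordinates',
                  'Latitude and longitude must exist together'): float,
})

COORDS({})                                      # valid: neither key given
COORDS({'latitude': 59.9, 'longitude': 10.8})   # valid: both keys given
# COORDS({'latitude': 59.9})  # raises vol.MultipleInvalid: group incomplete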
SECRET_GROUP = 'Password or SSH Key'

PLATFORM_SCHEMA = vol.All(
    cv.has_at_least_one_key(CONF_PASSWORD, CONF_PUB_KEY, CONF_SSH_KEY),
    PLATFORM_SCHEMA.extend({
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_USERNAME): cv.string,
        vol.Optional(CONF_PROTOCOL, default='ssh'): vol.In(['ssh', 'telnet']),
        vol.Optional(CONF_MODE, default='router'): vol.In(['router', 'ap']),
        vol.Optional(CONF_PORT, default=DEFAULT_SSH_PORT): cv.port,
        vol.Exclusive(CONF_PASSWORD, SECRET_GROUP): cv.string,
        vol.Exclusive(CONF_SSH_KEY, SECRET_GROUP): cv.isfile,
        vol.Exclusive(CONF_PUB_KEY, SECRET_GROUP): cv.isfile
    }))

_LEASES_CMD = 'cat /var/lib/misc/dnsmasq.leases'
_LEASES_REGEX = re.compile(
    r'\w+\s' +
    r'(?P<mac>(([0-9a-f]{2}[:-]){5}([0-9a-f]{2})))\s' +
    r'(?P<ip>([0-9]{1,3}[\.]){3}[0-9]{1,3})\s' +
    r'(?P<host>([^\s]+))')

# Command to get both 5GHz and 2.4GHz clients
_WL_CMD = 'for dev in `nvram get wl_ifnames`; do wl -i $dev assoclist; done'
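# Note the pattern above: vol.Exclusive alone only rejects configs that carry
# more than one key of SECRET_GROUP; it does not require any of them. Wrapping
# the schema in vol.All with cv.has_at_least_one_key turns this into "exactly
# one of password / ssh_key / pub_key". A minimal standalone sketch of the same
# idea in plain voluptuous; the helper below is an illustrative stand-in for
# cv.has_at_least_one_key, not the Home Assistant implementation.
import voluptuous as vol


def has_at_least_one_key(*keys):
    """Require at least one of the given keys to be present."""
    def validate(obj):
        if any(key in obj for key in keys):
            return obj
        raise vol.Invalid(
            'must contain at least one of {}'.format(', '.join(keys)))
    return validate


CREDENTIALS = vol.All(
    has_at_least_one_key('password', 'ssh_key', 'pub_key'),
    vol.Schema({
        vol.Exclusive('password', 'secret'): str,
        vol.Exclusive('ssh_key', 'secret'): str,
        vol.Exclusive('pub_key', 'secret'): str,
    }),
)

CREDENTIALS({'password': 'hunter2'})                # valid: exactly one secret
# CREDENTIALS({})                                   # raises: no secret at all
# CREDENTIALS({'password': 'x', 'ssh_key': 'y'})    # raises: mutually exclusive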
BLACKBIRD_SETALLZONES_SCHEMA = MEDIA_PLAYER_SCHEMA.extend(
    {vol.Required(ATTR_SOURCE): cv.string}
)

# Valid zone ids: 1-8
ZONE_IDS = vol.All(vol.Coerce(int), vol.Range(min=1, max=8))

# Valid source ids: 1-8
SOURCE_IDS = vol.All(vol.Coerce(int), vol.Range(min=1, max=8))

PLATFORM_SCHEMA = vol.All(
    cv.has_at_least_one_key(CONF_PORT, CONF_HOST),
    PLATFORM_SCHEMA.extend(
        {
            vol.Exclusive(CONF_PORT, CONF_TYPE): cv.string,
            vol.Exclusive(CONF_HOST, CONF_TYPE): cv.string,
            vol.Required(CONF_ZONES): vol.Schema({ZONE_IDS: ZONE_SCHEMA}),
            vol.Required(CONF_SOURCES): vol.Schema({SOURCE_IDS: SOURCE_SCHEMA}),
        }
    ),
)


def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Monoprice Blackbird 4k 8x8 HDBaseT Matrix platform."""
        schema[vol.Optional(CONF_TRIGGER_TIME)] = vol.All(
            cv.time_period, cv.positive_timedelta)
    if state in SUPPORTED_ARMING_STATES:
        schema[vol.Optional(CONF_ARMING_TIME)] = vol.All(
            cv.time_period, cv.positive_timedelta)
    return vol.Schema(schema)


PLATFORM_SCHEMA = vol.Schema(
    vol.All(
        {
            vol.Required(CONF_PLATFORM): "manual",
            vol.Optional(CONF_NAME, default=DEFAULT_ALARM_NAME): cv.string,
            vol.Exclusive(CONF_CODE, "code validation"): cv.string,
            vol.Exclusive(CONF_CODE_TEMPLATE, "code validation"): cv.template,
            vol.Optional(CONF_CODE_ARM_REQUIRED, default=True): cv.boolean,
            vol.Optional(CONF_DELAY_TIME, default=DEFAULT_DELAY_TIME): vol.All(
                cv.time_period, cv.positive_timedelta
            ),
            vol.Optional(CONF_ARMING_TIME, default=DEFAULT_ARMING_TIME): vol.All(
                cv.time_period, cv.positive_timedelta
            ),
            vol.Optional(CONF_TRIGGER_TIME, default=DEFAULT_TRIGGER_TIME): vol.All(
                cv.time_period, cv.positive_timedelta
            ),
            vol.Optional(
                CONF_DISARM_AFTER_TRIGGER, default=DEFAULT_DISARM_AFTER_TRIGGER
            ): cv.boolean,
            vol.Optional(STATE_ALARM_ARMED_AWAY, default={}):
    LANGUAGE_FLUX: _QUERY_SENSOR_SCHEMA.extend(
        {
            vol.Optional(CONF_BUCKET): cv.string,
            vol.Optional(CONF_RANGE_START, default=DEFAULT_RANGE_START): cv.string,
            vol.Optional(CONF_RANGE_STOP, default=DEFAULT_RANGE_STOP): cv.string,
            vol.Required(CONF_QUERY): cv.template,
            vol.Optional(CONF_IMPORTS): vol.All(cv.ensure_list, [cv.string]),
            vol.Optional(CONF_GROUP_FUNCTION): cv.string,
        }
    ),
}

PLATFORM_SCHEMA = vol.All(
    SENSOR_PLATFORM_SCHEMA.extend(COMPONENT_CONFIG_SCHEMA_CONNECTION).extend(
        {
            vol.Exclusive(CONF_QUERIES, "queries"): [_QUERY_SCHEMA[LANGUAGE_INFLUXQL]],
            vol.Exclusive(CONF_QUERIES_FLUX, "queries"): [_QUERY_SCHEMA[LANGUAGE_FLUX]],
        }
    ),
    validate_version_specific_config,
    validate_query_format_for_version,
    create_influx_url,
)


def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the InfluxDB component."""
    try:
        influx = get_influx_connection(config, test_read=True)
    except ConnectionError as exc:
        _LOGGER.error(exc)
    PULSE_MODE_SOLID
]

LIFX_EFFECT_SCHEMA = vol.Schema({
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
    vol.Optional(ATTR_POWER_ON, default=True): cv.boolean,
})

LIFX_EFFECT_PULSE_SCHEMA = LIFX_EFFECT_SCHEMA.extend({
    ATTR_BRIGHTNESS: VALID_BRIGHTNESS,
    ATTR_BRIGHTNESS_PCT: VALID_BRIGHTNESS_PCT,
    vol.Exclusive(ATTR_COLOR_NAME, COLOR_GROUP): cv.string,
    vol.Exclusive(ATTR_RGB_COLOR, COLOR_GROUP):
        vol.All(vol.ExactSequence((cv.byte, cv.byte, cv.byte)),
                vol.Coerce(tuple)),
    vol.Exclusive(ATTR_XY_COLOR, COLOR_GROUP):
        vol.All(vol.ExactSequence((cv.small_float, cv.small_float)),
                vol.Coerce(tuple)),
    vol.Exclusive(ATTR_HS_COLOR, COLOR_GROUP):
        vol.All(
            vol.ExactSequence(
                (vol.All(vol.Coerce(float), vol.Range(min=0, max=360)),
                 vol.All(vol.Coerce(float), vol.Range(min=0, max=100)))),
            vol.Coerce(tuple)),
    vol.Exclusive(ATTR_COLOR_TEMP, COLOR_GROUP):
        vol.All(vol.Coerce(int), vol.Range(min=1)),
    vol.Exclusive(ATTR_KELVIN, COLOR_GROUP):
SERVICE_ISSUE_ZIGBEE_GROUP_COMMAND = "issue_zigbee_group_command"
SERVICE_DIRECT_ZIGBEE_BIND = "issue_direct_zigbee_bind"
SERVICE_DIRECT_ZIGBEE_UNBIND = "issue_direct_zigbee_unbind"
SERVICE_WARNING_DEVICE_SQUAWK = "warning_device_squawk"
SERVICE_WARNING_DEVICE_WARN = "warning_device_warn"
SERVICE_ZIGBEE_BIND = "service_zigbee_bind"
IEEE_SERVICE = "ieee_based_service"

SERVICE_PERMIT_PARAMS = {
    vol.Optional(ATTR_IEEE, default=None): EUI64.convert,
    vol.Optional(ATTR_DURATION, default=60): vol.All(
        vol.Coerce(int), vol.Range(0, 254)
    ),
    vol.Inclusive(ATTR_SOURCE_IEEE, "install_code"): EUI64.convert,
    vol.Inclusive(ATTR_INSTALL_CODE, "install_code"): convert_install_code,
    vol.Exclusive(ATTR_QR_CODE, "install_code"): vol.All(str, qr_to_install_code),
}

SERVICE_SCHEMAS = {
    SERVICE_PERMIT: vol.Schema(
        vol.All(
            cv.deprecated(ATTR_IEEE_ADDRESS, replacement_key=ATTR_IEEE),
            SERVICE_PERMIT_PARAMS,
        )
    ),
    IEEE_SERVICE: vol.Schema(
        vol.All(
            cv.deprecated(ATTR_IEEE_ADDRESS, replacement_key=ATTR_IEEE),
            {vol.Required(ATTR_IEEE): EUI64.convert},
        )
    ),
CONF_VALUE_OR_POSITION_TEMPLATE = 'value_or_position'
CONF_OPEN_OR_CLOSE = 'open_or_close'
CONF_OPEN_CLOSE_TIME = 'open_close_time'

TILT_FEATURES = (SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT | SUPPORT_STOP_TILT |
                 SUPPORT_SET_TILT_POSITION)

COVER_SCHEMA = vol.Schema({
    vol.Inclusive(OPEN_ACTION, CONF_OPEN_OR_CLOSE): cv.SCRIPT_SCHEMA,
    vol.Inclusive(CLOSE_ACTION, CONF_OPEN_OR_CLOSE): cv.SCRIPT_SCHEMA,
    vol.Optional(STOP_ACTION): cv.SCRIPT_SCHEMA,
    vol.Exclusive(CONF_POSITION_TEMPLATE,
                  CONF_VALUE_OR_POSITION_TEMPLATE): cv.template,
    vol.Exclusive(CONF_VALUE_TEMPLATE,
                  CONF_VALUE_OR_POSITION_TEMPLATE): cv.template,
    vol.Optional(CONF_POSITION_TEMPLATE): cv.template,
    vol.Optional(CONF_TILT_TEMPLATE): cv.template,
    vol.Optional(CONF_ICON_TEMPLATE): cv.template,
    vol.Optional(CONF_OPTIMISTIC): cv.boolean,
    vol.Optional(CONF_TILT_OPTIMISTIC): cv.boolean,
    vol.Optional(POSITION_ACTION): cv.SCRIPT_SCHEMA,
import openpeerpower.helpers.config_validation as cv

_LOGGER = logging.getLogger(__name__)

DOMAIN = "freedns"

DEFAULT_INTERVAL = timedelta(minutes=10)

TIMEOUT = 10
UPDATE_URL = "https://freedns.afraid.org/dynamic/update.php"

CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema({
            vol.Exclusive(CONF_URL, DOMAIN): cv.string,
            vol.Exclusive(CONF_ACCESS_TOKEN, DOMAIN): cv.string,
            vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_INTERVAL):
                vol.All(cv.time_period, cv.positive_timedelta),
        })
    },
    extra=vol.ALLOW_EXTRA,
)


async def async_setup(opp, config):
    """Initialize the FreeDNS component."""
    conf = config[DOMAIN]
    url = conf.get(CONF_URL)
    cv.ensure_list,
    [
        vol.Schema({
            vol.Required(CONF_COMPONENT_NAME): cv.string,
            vol.Optional(CONF_SIDEBAR_TITLE): cv.string,
            vol.Optional(CONF_SIDEBAR_ICON, default=DEFAULT_ICON): cv.icon,
            vol.Optional(CONF_URL_PATH): cv.string,
            vol.Optional(CONF_CONFIG): dict,
            vol.Exclusive(
                CONF_WEBCOMPONENT_PATH,
                CONF_URL_EXCLUSIVE_GROUP,
                msg=MSG_URL_CONFLICT,
            ): cv.string,
            vol.Exclusive(
                CONF_JS_URL, CONF_URL_EXCLUSIVE_GROUP, msg=MSG_URL_CONFLICT
            ): cv.string,
            vol.Exclusive(
                CONF_MODULE_URL,
                CONF_URL_EXCLUSIVE_GROUP,
                msg=MSG_URL_CONFLICT,
            ): cv.string,
            vol.Optional(CONF_EMBED_IFRAME, default=DEFAULT_EMBED_IFRAME):
CONF_MOMENTARY = 'momentary'
CONF_PAUSE = 'pause'
CONF_REPEAT = 'repeat'
CONF_INVERSE = 'inverse'
CONF_BLINK = 'blink'
CONF_DISCOVERY = 'discovery'

STATE_LOW = 'low'
STATE_HIGH = 'high'

PIN_TO_ZONE = {1: 1, 2: 2, 5: 3, 6: 4, 7: 5, 8: 'out', 9: 6}
ZONE_TO_PIN = {zone: pin for pin, zone in PIN_TO_ZONE.items()}

_BINARY_SENSOR_SCHEMA = vol.All(
    vol.Schema({
        vol.Exclusive(CONF_PIN, 's_pin'): vol.Any(*PIN_TO_ZONE),
        vol.Exclusive(CONF_ZONE, 's_pin'): vol.Any(*ZONE_TO_PIN),
        vol.Required(CONF_TYPE): DEVICE_CLASSES_SCHEMA,
        vol.Optional(CONF_NAME): cv.string,
        vol.Optional(CONF_INVERSE, default=False): cv.boolean,
    }),
    cv.has_at_least_one_key(CONF_PIN, CONF_ZONE)
)

_SWITCH_SCHEMA = vol.All(
    vol.Schema({
        vol.Exclusive(CONF_PIN, 'a_pin'): vol.Any(*PIN_TO_ZONE),
        vol.Exclusive(CONF_ZONE, 'a_pin'): vol.Any(*ZONE_TO_PIN),
        vol.Optional(CONF_NAME): cv.string,
        vol.Optional(CONF_ACTIVATION, default=STATE_HIGH):
            vol.All(vol.Lower, vol.Any(STATE_HIGH, STATE_LOW)),
        vol.Optional(CONF_MOMENTARY):
    vol.All(
        cv.deprecated(DOMAIN),
        {
            DOMAIN: vol.Schema(
                {
                    vol.Required(CONF_HOST): cv.string,
                    vol.Required(CONF_USERNAME): cv.string,
                    vol.Optional(CONF_PROTOCOL, default=PROTOCOL_SSH): vol.In(
                        [PROTOCOL_SSH, PROTOCOL_TELNET]
                    ),
                    vol.Optional(CONF_MODE, default=MODE_ROUTER): vol.In(
                        [MODE_ROUTER, MODE_AP]
                    ),
                    vol.Optional(CONF_PORT, default=DEFAULT_SSH_PORT): cv.port,
                    vol.Optional(CONF_REQUIRE_IP, default=True): cv.boolean,
                    vol.Exclusive(CONF_PASSWORD, SECRET_GROUP): cv.string,
                    vol.Exclusive(CONF_SSH_KEY, SECRET_GROUP): cv.isfile,
                    vol.Exclusive(CONF_PUB_KEY, SECRET_GROUP): cv.isfile,
                    vol.Optional(CONF_SENSORS): vol.All(
                        cv.ensure_list, [vol.In(SENSOR_TYPES)]
                    ),
                    vol.Optional(CONF_INTERFACE, default=DEFAULT_INTERFACE): cv.string,
                    vol.Optional(CONF_DNSMASQ, default=DEFAULT_DNSMASQ): cv.string,
                }
            )
        },
    ),
    extra=vol.ALLOW_EXTRA,
)
DEFAULT_MAX_HUMIDITY = 99

ENTITY_ID_FORMAT = DOMAIN + ".{}"
SCAN_INTERVAL = timedelta(seconds=60)

CONVERTIBLE_ATTRIBUTE = [
    ATTR_TEMPERATURE,
    ATTR_TARGET_TEMP_LOW,
    ATTR_TARGET_TEMP_HIGH
]

_LOGGER = logging.getLogger(__name__)

SET_TEMPERATURE_SCHEMA = vol.All(
    cv.has_at_least_one_key(ATTR_TEMPERATURE, ATTR_TARGET_TEMP_HIGH,
                            ATTR_TARGET_TEMP_LOW),
    make_entity_service_schema({
        vol.Exclusive(ATTR_TEMPERATURE, "temperature"): vol.Coerce(float),
        vol.Inclusive(ATTR_TARGET_TEMP_HIGH, "temperature"): vol.Coerce(float),
        vol.Inclusive(ATTR_TARGET_TEMP_LOW, "temperature"): vol.Coerce(float),
        vol.Optional(ATTR_HVAC_MODE): vol.In(HVAC_MODES),
    }),
)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up climate entities."""
    component = hass.data[DOMAIN] = EntityComponent(
        _LOGGER, DOMAIN, hass, SCAN_INTERVAL
    )
    TEMPLATE_SENSOR_BASE_SCHEMA,
)
from .const import (
    CONF_JSON_ATTRS,
    CONF_JSON_ATTRS_PATH,
    DEFAULT_FORCE_UPDATE,
    DEFAULT_METHOD,
    DEFAULT_VERIFY_SSL,
    DOMAIN,
    METHODS,
)
from .data import DEFAULT_TIMEOUT

RESOURCE_SCHEMA = {
    vol.Exclusive(CONF_RESOURCE, CONF_RESOURCE): cv.url,
    vol.Exclusive(CONF_RESOURCE_TEMPLATE, CONF_RESOURCE): cv.template,
    vol.Optional(CONF_AUTHENTICATION): vol.In(
        [HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION]
    ),
    vol.Optional(CONF_HEADERS): vol.Schema({cv.string: cv.template}),
    vol.Optional(CONF_PARAMS): vol.Schema({cv.string: cv.template}),
    vol.Optional(CONF_METHOD, default=DEFAULT_METHOD): vol.In(METHODS),
    vol.Optional(CONF_USERNAME): cv.string,
    vol.Optional(CONF_PASSWORD): cv.string,
SERVICE_SETALLZONES = 'blackbird_set_all_zones'
ATTR_SOURCE = 'source'

BLACKBIRD_SETALLZONES_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
    vol.Required(ATTR_SOURCE): cv.string})

# Valid zone ids: 1-8
ZONE_IDS = vol.All(vol.Coerce(int), vol.Range(min=1, max=8))

# Valid source ids: 1-8
SOURCE_IDS = vol.All(vol.Coerce(int), vol.Range(min=1, max=8))

PLATFORM_SCHEMA = vol.All(
    cv.has_at_least_one_key(CONF_PORT, CONF_HOST),
    PLATFORM_SCHEMA.extend({
        vol.Exclusive(CONF_PORT, CONF_TYPE): cv.string,
        vol.Exclusive(CONF_HOST, CONF_TYPE): cv.string,
        vol.Required(CONF_ZONES): vol.Schema({ZONE_IDS: ZONE_SCHEMA}),
        vol.Required(CONF_SOURCES): vol.Schema({SOURCE_IDS: SOURCE_SCHEMA}),
    }))


def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Monoprice Blackbird 4k 8x8 HDBaseT Matrix platform."""
    if DATA_BLACKBIRD not in hass.data:
        hass.data[DATA_BLACKBIRD] = {}
CONF_API_REGION = 'api_region'
CONF_GRACE_PERIOD = 'grace_period'
CONF_DEBUG = 'debug'
CONF_ENTITY_PREFIX = 'entity_prefix'

DOMAIN = "sonoff"

REQUIREMENTS = ['uuid', 'websocket-client==0.54.0']

import websocket

_LOGGER = logging.getLogger(__name__)

CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Exclusive(CONF_USERNAME, CONF_PASSWORD): cv.string,
        vol.Exclusive(CONF_EMAIL, CONF_PASSWORD): cv.string,
        vol.Required(CONF_PASSWORD): cv.string,
        vol.Optional(CONF_API_REGION, default='eu'): cv.string,
        vol.Optional(CONF_SCAN_INTERVAL, default=timedelta(seconds=30)):
            cv.time_period,
        vol.Optional(CONF_GRACE_PERIOD, default=600): cv.positive_int,
        vol.Optional(CONF_ENTITY_PREFIX, default=True): cv.boolean,
        vol.Optional(CONF_DEBUG, default=False): cv.boolean
    }, extra=vol.ALLOW_EXTRA),
}, extra=vol.ALLOW_EXTRA)


async def async_setup(hass, config):
    """Set up the eWelink/Sonoff component."""
YEELIGHT_RGB_TRANSITION = 'RGBTransition'
YEELIGHT_HSV_TRANSACTION = 'HSVTransition'
YEELIGHT_TEMPERATURE_TRANSACTION = 'TemperatureTransition'
YEELIGHT_SLEEP_TRANSACTION = 'SleepTransition'

YEELIGHT_SERVICE_SCHEMA = vol.Schema({
    vol.Required(ATTR_ENTITY_ID): cv.entity_ids,
})

YEELIGHT_FLOW_TRANSITION_SCHEMA = {
    vol.Optional(ATTR_COUNT, default=0): cv.positive_int,
    vol.Required(ATTR_TRANSITIONS): [{
        vol.Exclusive(YEELIGHT_RGB_TRANSITION, CONF_TRANSITION):
            vol.All(cv.ensure_list, [cv.positive_int]),
        vol.Exclusive(YEELIGHT_HSV_TRANSACTION, CONF_TRANSITION):
            vol.All(cv.ensure_list, [cv.positive_int]),
        vol.Exclusive(YEELIGHT_TEMPERATURE_TRANSACTION, CONF_TRANSITION):
            vol.All(cv.ensure_list, [cv.positive_int]),
        vol.Exclusive(YEELIGHT_SLEEP_TRANSACTION, CONF_TRANSITION):
            vol.All(cv.ensure_list, [cv.positive_int]),
    }]
}

DEVICE_SCHEMA = vol.Schema({
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_TRANSITION, default=DEFAULT_TRANSITION): cv.positive_int,
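# The exclusion group above sits inside a list schema ([{...}]), so it is
# enforced per transition entry: each dict in ATTR_TRANSITIONS may carry at
# most one transition type, while different entries can use different types.
# A minimal standalone sketch of that behaviour in plain voluptuous
# (illustrative keys and values only):
import voluptuous as vol

FLOW = vol.Schema({
    vol.Required('transitions'): [{
        vol.Exclusive('RGBTransition', 'transition'): [int],
        vol.Exclusive('HSVTransition', 'transition'): [int],
        vol.Exclusive('SleepTransition', 'transition'): [int],
    }]
})

FLOW({'transitions': [{'RGBTransition': [255, 0, 0, 100, 500]},
                      {'SleepTransition': [500]}]})                 # valid
# FLOW({'transitions': [{'RGBTransition': [255, 0, 0, 100, 500],
#                        'HSVTransition': [0, 0, 0, 100, 500]}]})   # raises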
async def async_setup(hass, config):
    """Set up all groups found defined in the configuration."""
    component = hass.data.get(DOMAIN)

    if component is None:
        component = hass.data[DOMAIN] = EntityComponent(_LOGGER, DOMAIN, hass)

    await _async_process_config(hass, config, component)

    async def reload_service_handler(service):
        """Remove all user-defined groups and load new ones from config."""
        auto = list(filter(lambda e: not e.user_defined, component.entities))

        conf = await component.async_prepare_reload()
        if conf is None:
            return
        await _async_process_config(hass, conf, component)

        await component.async_add_entities(auto)

    hass.services.async_register(
        DOMAIN, SERVICE_RELOAD, reload_service_handler,
        schema=vol.Schema({}))

    service_lock = asyncio.Lock()

    async def locked_service_handler(service):
        """Handle a service with an async lock."""
        async with service_lock:
            await groups_service_handler(service)

    async def groups_service_handler(service):
        """Handle dynamic group service functions."""
        object_id = service.data[ATTR_OBJECT_ID]
        entity_id = ENTITY_ID_FORMAT.format(object_id)
        group = component.get_entity(entity_id)

        # new group
        if service.service == SERVICE_SET and group is None:
            entity_ids = (service.data.get(ATTR_ENTITIES) or
                          service.data.get(ATTR_ADD_ENTITIES) or None)

            extra_arg = {
                attr: service.data[attr]
                for attr in (ATTR_VISIBLE, ATTR_ICON, ATTR_VIEW, ATTR_CONTROL)
                if service.data.get(attr) is not None
            }

            await Group.async_create_group(
                hass,
                service.data.get(ATTR_NAME, object_id),
                object_id=object_id,
                entity_ids=entity_ids,
                user_defined=False,
                mode=service.data.get(ATTR_ALL),
                **extra_arg,
            )
            return

        if group is None:
            _LOGGER.warning("%s:Group '%s' doesn't exist!",
                            service.service, object_id)
            return

        # update group
        if service.service == SERVICE_SET:
            need_update = False

            if ATTR_ADD_ENTITIES in service.data:
                delta = service.data[ATTR_ADD_ENTITIES]
                entity_ids = set(group.tracking) | set(delta)
                await group.async_update_tracked_entity_ids(entity_ids)

            if ATTR_ENTITIES in service.data:
                entity_ids = service.data[ATTR_ENTITIES]
                await group.async_update_tracked_entity_ids(entity_ids)

            if ATTR_NAME in service.data:
                group.name = service.data[ATTR_NAME]
                need_update = True

            if ATTR_VISIBLE in service.data:
                group.visible = service.data[ATTR_VISIBLE]
                need_update = True

            if ATTR_ICON in service.data:
                group.icon = service.data[ATTR_ICON]
                need_update = True

            if ATTR_CONTROL in service.data:
                group.control = service.data[ATTR_CONTROL]
                need_update = True

            if ATTR_VIEW in service.data:
                group.view = service.data[ATTR_VIEW]
                need_update = True

            if ATTR_ALL in service.data:
                group.mode = all if service.data[ATTR_ALL] else any
                need_update = True

            if need_update:
                await group.async_update_ha_state()

            return

        # remove group
        if service.service == SERVICE_REMOVE:
            await component.async_remove_entity(entity_id)

    hass.services.async_register(
        DOMAIN, SERVICE_SET, locked_service_handler,
        schema=vol.Schema({
            vol.Required(ATTR_OBJECT_ID): cv.slug,
            vol.Optional(ATTR_NAME): cv.string,
            vol.Optional(ATTR_VIEW): cv.boolean,
            vol.Optional(ATTR_ICON): cv.string,
            vol.Optional(ATTR_CONTROL): CONTROL_TYPES,
            vol.Optional(ATTR_VISIBLE): cv.boolean,
            vol.Optional(ATTR_ALL): cv.boolean,
            vol.Exclusive(ATTR_ENTITIES, "entities"): cv.entity_ids,
            vol.Exclusive(ATTR_ADD_ENTITIES, "entities"): cv.entity_ids,
        }),
    )

    hass.services.async_register(
        DOMAIN, SERVICE_REMOVE, groups_service_handler,
        schema=vol.Schema({vol.Required(ATTR_OBJECT_ID): cv.slug}),
    )

    async def visibility_service_handler(service):
        """Change visibility of a group."""
        visible = service.data.get(ATTR_VISIBLE)

        tasks = []
        for group in await component.async_extract_from_service(
                service, expand_group=False):
            group.visible = visible
            tasks.append(group.async_update_ha_state())

        if tasks:
            await asyncio.wait(tasks)

    hass.services.async_register(
        DOMAIN, SERVICE_SET_VISIBILITY, visibility_service_handler,
        schema=make_entity_service_schema(
            {vol.Required(ATTR_VISIBLE): cv.boolean}),
    )

    return True
    vol.Required(CONF_KNX_LOCAL_IP): cv.string,
})

EXPOSE_SCHEMA = vol.Schema({
    vol.Required(CONF_KNX_EXPOSE_TYPE): cv.string,
    vol.Optional(CONF_ENTITY_ID): cv.entity_id,
    vol.Required(CONF_KNX_EXPOSE_ADDRESS): cv.string,
})

CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Optional(CONF_KNX_CONFIG): cv.string,
        vol.Exclusive(CONF_KNX_ROUTING, 'connection_type'): ROUTING_SCHEMA,
        vol.Exclusive(CONF_KNX_TUNNELING, 'connection_type'):
            TUNNELING_SCHEMA,
        vol.Inclusive(CONF_KNX_FIRE_EVENT, 'fire_ev'): cv.boolean,
        vol.Inclusive(CONF_KNX_FIRE_EVENT_FILTER, 'fire_ev'):
            vol.All(cv.ensure_list, [cv.string]),
        vol.Optional(CONF_KNX_STATE_UPDATER, default=True): cv.boolean,
        vol.Optional(CONF_KNX_EXPOSE):
            vol.All(cv.ensure_list, [EXPOSE_SCHEMA]),
    })
}, extra=vol.ALLOW_EXTRA)
from esphome.components import sensor
from esphome.components.dallas import DallasComponent
import esphome.config_validation as cv
from esphome.const import CONF_ADDRESS, CONF_DALLAS_ID, CONF_INDEX, CONF_NAME, \
    CONF_RESOLUTION
from esphome.cpp_generator import HexIntLiteral, get_variable

DallasTemperatureSensor = sensor.sensor_ns.class_(
    'DallasTemperatureSensor', sensor.EmptyPollingParentSensor)

PLATFORM_SCHEMA = cv.nameable(
    sensor.SENSOR_PLATFORM_SCHEMA.extend({
        cv.GenerateID(): cv.declare_variable_id(DallasTemperatureSensor),
        vol.Exclusive(CONF_ADDRESS, 'dallas'): cv.hex_int,
        vol.Exclusive(CONF_INDEX, 'dallas'): cv.positive_int,
        cv.GenerateID(CONF_DALLAS_ID): cv.use_variable_id(DallasComponent),
        vol.Optional(CONF_RESOLUTION): vol.All(vol.Coerce(int),
                                               vol.Range(min=9, max=12)),
    }),
    cv.has_at_least_one_key(CONF_ADDRESS, CONF_INDEX))


def to_code(config):
    for hub in get_variable(config[CONF_DALLAS_ID]):
        yield
    if CONF_ADDRESS in config:
        address = HexIntLiteral(config[CONF_ADDRESS])
MQTT_RW_PLATFORM_SCHEMA = MQTT_BASE_PLATFORM_SCHEMA.extend({
    vol.Required(CONF_COMMAND_TOPIC): valid_publish_topic,
    vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
    vol.Optional(CONF_STATE_TOPIC): valid_subscribe_topic,
    vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
})

# Service call validation schema
MQTT_PUBLISH_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_TOPIC): valid_publish_topic,
        vol.Exclusive(ATTR_PAYLOAD, CONF_PAYLOAD): object,
        vol.Exclusive(ATTR_PAYLOAD_TEMPLATE, CONF_PAYLOAD): cv.string,
        vol.Optional(ATTR_QOS, default=DEFAULT_QOS): _VALID_QOS_SCHEMA,
        vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
    },
    required=True,
)

# pylint: disable=invalid-name
PublishPayloadType = Union[str, bytes, int, float, None]
SubscribePayloadType = Union[str, bytes]  # Only bytes if encoding is None


@attr.s(slots=True, frozen=True)
class Message:
    """MQTT Message."""
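# In the publish schema above, required=True makes plain keys mandatory while
# vol.Optional and vol.Exclusive keys (Exclusive subclasses Optional) stay
# optional, so a call may omit both payload variants but never the topic.
# A minimal standalone sketch of that behaviour in plain voluptuous
# (illustrative keys and values, not the MQTT service schema itself):
import voluptuous as vol

PUBLISH = vol.Schema(
    {
        "topic": str,  # plain key: required because of required=True
        vol.Exclusive("payload", "payload"): object,
        vol.Exclusive("payload_template", "payload"): str,
        vol.Optional("qos", default=0): vol.In([0, 1, 2]),
        vol.Optional("retain", default=False): bool,
    },
    required=True,
)

PUBLISH({"topic": "home/light", "payload": "on"})
# -> {'topic': 'home/light', 'payload': 'on', 'qos': 0, 'retain': False}
# PUBLISH({"payload": "on"})  # raises: required key 'topic' not provided
# PUBLISH({"topic": "t", "payload": "on", "payload_template": "x"})  # raises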
from homeassistant.components.http.auth import validate_password
from homeassistant.components.http.ban import process_wrong_login, \
    process_success_login
from homeassistant.auth.providers import legacy_api_password

from .connection import ActiveConnection
from .error import Disconnect

TYPE_AUTH = 'auth'
TYPE_AUTH_INVALID = 'auth_invalid'
TYPE_AUTH_OK = 'auth_ok'
TYPE_AUTH_REQUIRED = 'auth_required'

AUTH_MESSAGE_SCHEMA = vol.Schema({
    vol.Required('type'): TYPE_AUTH,
    vol.Exclusive('api_password', 'auth'): str,
    vol.Exclusive('access_token', 'auth'): str,
})


def auth_ok_message():
    """Return an auth_ok message."""
    return {
        'type': TYPE_AUTH_OK,
        'ha_version': __version__,
    }


def auth_required_message():
    """Return an auth_required message."""
    return {
# Calendar Platform: When does a calendar event start?
START = 'start'
# Calendar Platform: What is the next calendar event about?
SUMMARY = 'summary'
# Todoist API: Fetch all Tasks
TASKS = 'items'

SERVICE_NEW_TASK = 'todoist_new_task'
NEW_TASK_SERVICE_SCHEMA = vol.Schema({
    vol.Required(CONTENT): cv.string,
    vol.Optional(PROJECT_NAME, default='inbox'): vol.All(cv.string, vol.Lower),
    vol.Optional(LABELS): cv.ensure_list_csv,
    vol.Optional(PRIORITY): vol.All(vol.Coerce(int), vol.Range(min=1, max=4)),
    vol.Exclusive(DUE_DATE_STRING, 'due_date'): cv.string,
    vol.Optional(DUE_DATE_LANG):
        vol.All(cv.string, vol.In(DUE_DATE_VALID_LANGS)),
    vol.Exclusive(DUE_DATE, 'due_date'): cv.string,
})

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_TOKEN): cv.string,
    vol.Optional(CONF_EXTRA_PROJECTS, default=[]):
        vol.All(cv.ensure_list, vol.Schema([
            vol.Schema({
                vol.Required(CONF_NAME): cv.string,
                vol.Optional(CONF_PROJECT_DUE_DATE): vol.Coerce(int),
                vol.Optional(CONF_PROJECT_WHITELIST, default=[]):
                    vol.All(cv.ensure_list, [vol.All(cv.string, vol.Lower)]),
                vol.Optional(CONF_PROJECT_LABEL_WHITELIST, default=[]):
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
})

SET_AWAY_MODE_SCHEMA = vol.Schema({
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
    vol.Required(ATTR_AWAY_MODE): cv.boolean,
})

SET_AUX_HEAT_SCHEMA = vol.Schema({
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
    vol.Required(ATTR_AUX_HEAT): cv.boolean,
})

SET_TEMPERATURE_SCHEMA = vol.Schema(vol.All(
    cv.has_at_least_one_key(
        ATTR_TEMPERATURE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW),
    {
        vol.Exclusive(ATTR_TEMPERATURE, 'temperature'): vol.Coerce(float),
        vol.Inclusive(ATTR_TARGET_TEMP_HIGH, 'temperature'): vol.Coerce(float),
        vol.Inclusive(ATTR_TARGET_TEMP_LOW, 'temperature'): vol.Coerce(float),
        vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
        vol.Optional(ATTR_OPERATION_MODE): cv.string,
    }
))

SET_FAN_MODE_SCHEMA = vol.Schema({
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
    vol.Required(ATTR_FAN_MODE): cv.string,
})

SET_HOLD_MODE_SCHEMA = vol.Schema({
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
    vol.Required(ATTR_HOLD_MODE): cv.string,
})

SET_OPERATION_MODE_SCHEMA = vol.Schema({
def _validate_schema(config):
    if config.get(CONF_LOCATION) is None:
        if not all(config.get(x) for x in (CONF_ID, CONF_DELTA, CONF_FRAMES)):
            raise vol.Invalid(
                "Specify '{}', '{}' and '{}' when '{}' is unspecified".format(
                    CONF_ID, CONF_DELTA, CONF_FRAMES, CONF_LOCATION))
    return config


LOCATIONS_MSG = "Set '{}' to one of: {}".format(
    CONF_LOCATION, ", ".join(sorted(LOCATIONS)))
XOR_MSG = "Specify exactly one of '{}' or '{}'".format(CONF_ID, CONF_LOCATION)

PLATFORM_SCHEMA = vol.All(
    PLATFORM_SCHEMA.extend({
        vol.Exclusive(CONF_ID, "xor", msg=XOR_MSG): cv.string,
        vol.Exclusive(CONF_LOCATION, "xor", msg=XOR_MSG):
            vol.In(LOCATIONS, msg=LOCATIONS_MSG),
        vol.Optional(CONF_DELTA): cv.positive_int,
        vol.Optional(CONF_FRAMES): cv.positive_int,
        vol.Optional(CONF_NAME): cv.string,
        vol.Optional(CONF_OUTFILE): cv.string,
    }),
    _validate_schema,
)
}

DEFAULT_VOICE = 'Joanna'
DEFAULT_OUTPUT_FORMAT = 'mp3'
DEFAULT_TEXT_TYPE = 'text'

DEFAULT_SAMPLE_RATES = {'mp3': '22050', 'ogg_vorbis': '22050', 'pcm': '16000'}

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_REGION, default=DEFAULT_REGION):
        vol.In(SUPPORTED_REGIONS),
    vol.Inclusive(CONF_ACCESS_KEY_ID, ATTR_CREDENTIALS): cv.string,
    vol.Inclusive(CONF_SECRET_ACCESS_KEY, ATTR_CREDENTIALS): cv.string,
    vol.Exclusive(CONF_PROFILE_NAME, ATTR_CREDENTIALS): cv.string,
    vol.Optional(CONF_VOICE, default=DEFAULT_VOICE): vol.In(SUPPORTED_VOICES),
    vol.Optional(CONF_OUTPUT_FORMAT, default=DEFAULT_OUTPUT_FORMAT):
        vol.In(SUPPORTED_OUTPUT_FORMATS),
    vol.Optional(CONF_SAMPLE_RATE):
        vol.All(cv.string, vol.In(SUPPORTED_SAMPLE_RATES)),
    vol.Optional(CONF_TEXT_TYPE, default=DEFAULT_TEXT_TYPE):
        vol.In(SUPPORTED_TEXT_TYPES),
})


def get_engine(hass, config):
    """Set up Amazon Polly speech component."""
    # pylint: disable=import-error
class ButtonSchema(KNXPlatformSchema):
    """Voluptuous schema for KNX buttons."""

    PLATFORM = Platform.BUTTON

    CONF_VALUE = "value"
    DEFAULT_NAME = "KNX Button"

    payload_or_value_msg = f"Please use only one of `{CONF_PAYLOAD}` or `{CONF_VALUE}`"
    length_or_type_msg = (
        f"Please use only one of `{CONF_PAYLOAD_LENGTH}` or `{CONF_TYPE}`"
    )

    ENTITY_SCHEMA = vol.All(
        vol.Schema(
            {
                vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
                vol.Required(KNX_ADDRESS): ga_validator,
                vol.Exclusive(
                    CONF_PAYLOAD, "payload_or_value", msg=payload_or_value_msg
                ): object,
                vol.Exclusive(
                    CONF_VALUE, "payload_or_value", msg=payload_or_value_msg
                ): object,
                vol.Exclusive(
                    CONF_PAYLOAD_LENGTH, "length_or_type", msg=length_or_type_msg
                ): object,
                vol.Exclusive(
                    CONF_TYPE, "length_or_type", msg=length_or_type_msg
                ): object,
                vol.Optional(CONF_ENTITY_CATEGORY): validate_entity_category,
            }
        ),
        vol.Any(
            vol.Schema(  # encoded value
                {
                    vol.Required(CONF_VALUE): vol.Any(int, float, str),
                    vol.Required(CONF_TYPE): sensor_type_validator,
                },
                extra=vol.ALLOW_EXTRA,
            ),
            vol.Schema(  # raw payload - default is DPT 1 style True
                {
                    vol.Optional(CONF_PAYLOAD, default=1): cv.positive_int,
                    vol.Optional(CONF_PAYLOAD_LENGTH, default=0): vol.All(
                        vol.Coerce(int), vol.Range(min=0, max=14)
                    ),
                    vol.Optional(CONF_VALUE): None,
                    vol.Optional(CONF_TYPE): None,
                },
                extra=vol.ALLOW_EXTRA,
            ),
        ),
        # calculate raw CONF_PAYLOAD and CONF_PAYLOAD_LENGTH
        # from CONF_VALUE and CONF_TYPE if given and check payload size
        button_payload_sub_validator,
    )
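# The entity schema above chains three validators with vol.All: the key/value
# schema, a vol.Any that forces the config into either the "encoded value"
# branch (value + type) or the "raw payload" branch, and a final cross-field
# validator. A minimal standalone sketch of the vol.Any branching in plain
# voluptuous (illustrative keys, not the KNX validators):
import voluptuous as vol

BRANCHES = vol.Any(
    vol.Schema(  # encoded value branch
        {
            vol.Required("value"): vol.Any(int, float, str),
            vol.Required("type"): str,
        },
        extra=vol.ALLOW_EXTRA,
    ),
    vol.Schema(  # raw payload branch
        {
            vol.Optional("payload", default=1): int,
            vol.Optional("payload_length", default=0): vol.Range(min=0, max=14),
            vol.Optional("value"): None,
            vol.Optional("type"): None,
        },
        extra=vol.ALLOW_EXTRA,
    ),
)

BRANCHES({"value": 21.5, "type": "temperature"})  # matches the first branch
BRANCHES({"payload": 1})                          # matches the second branch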
SCRIPT_ACTION_BASE_SCHEMA = {vol.Optional(CONF_ALIAS): string}

EVENT_SCHEMA = vol.Schema(
    {
        **SCRIPT_ACTION_BASE_SCHEMA,
        vol.Required(CONF_EVENT): string,
        vol.Optional(CONF_EVENT_DATA): vol.All(dict, template_complex),
        vol.Optional(CONF_EVENT_DATA_TEMPLATE): vol.All(dict, template_complex),
    }
)

SERVICE_SCHEMA = vol.All(
    vol.Schema(
        {
            **SCRIPT_ACTION_BASE_SCHEMA,
            vol.Exclusive(CONF_SERVICE, "service name"): vol.Any(
                service, dynamic_template
            ),
            vol.Exclusive(CONF_SERVICE_TEMPLATE, "service name"): vol.Any(
                service, dynamic_template
            ),
            vol.Optional("data"): vol.Any(template, vol.All(dict, template_complex)),
            vol.Optional("data_template"): vol.Any(
                template, vol.All(dict, template_complex)
            ),
            vol.Optional(CONF_ENTITY_ID): comp_entity_ids,
            vol.Optional(CONF_TARGET): vol.Any(ENTITY_SERVICE_FIELDS, dynamic_template),
        }
    ),
    has_at_least_one_key(CONF_SERVICE, CONF_SERVICE_TEMPLATE),
)