) from . import DOMAIN as ELK_DOMAIN, ElkEntity, create_elk_entities SIGNAL_ARM_ENTITY = "elkm1_arm" SIGNAL_DISPLAY_MESSAGE = "elkm1_display_message" ELK_ALARM_SERVICE_SCHEMA = vol.Schema({ vol.Required(ATTR_ENTITY_ID, default=[]): cv.entity_ids, vol.Required(ATTR_CODE): vol.All(vol.Coerce(int), vol.Range(0, 999999)), }) DISPLAY_MESSAGE_SERVICE_SCHEMA = vol.Schema({ vol.Optional(ATTR_ENTITY_ID, default=[]): cv.entity_ids, vol.Optional("clear", default=2): vol.All(vol.Coerce(int), vol.In([0, 1, 2])), vol.Optional("beep", default=False): cv.boolean, vol.Optional("timeout", default=0): vol.All(vol.Coerce(int), vol.Range(min=0, max=65535)), vol.Optional("line1", default=""): cv.string, vol.Optional("line2", default=""): cv.string, }) async def async_setup_platform(hass,
CONF_TOKEN, ATTR_ENTITY_ID,) from homeassistant.exceptions import PlatformNotReady import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = 'Xiaomi Miio Platform' PLATFORM = 'xiaomi_miio_devices' PLATFORM_ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Required(CONF_TOKEN): vol.All(cv.string, vol.Length(min=32, max=32)), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, }) REQUIREMENTS = ['python-miio==0.3.5'] # pylint: disable=unused-argument def setup_platform(hass, config, add_devices, discovery_info=None): """Set up the air purifier from config.""" from miio import AirPurifier, DeviceException if PLATFORM not in hass.data: hass.data[PLATFORM] = {} host = config.get(CONF_HOST) name = config.get(CONF_NAME) token = config.get(CONF_TOKEN)
# Constants for a FreeNAS/TrueNAS-style integration: entity attribute names,
# configuration keys, and service names + schemas for jail/VM control.

# Attribute names surfaced on pool entities.
ATTR_ENCRYPT = "Encrypted"
ATTR_POOL_GUID = "GUID"
ATTR_POOL_IS_DECRYPTED = "Is Decrypted"
ATTR_POOL_NAME = "Pool Name"

# Configuration keys.
CONF_AUTH_MODE = "auth_mode"
# NOTE(review): the two values below look like redaction/extraction artifacts —
# "******" and "API Key" are unusual for config-key constants (conventionally
# snake_case like "auth_password" / "api_key"). Confirm against the upstream
# source before relying on them; the values are behavior, so they are left as-is.
CONF_AUTH_PASSWORD = "******"
CONF_AUTH_API_KEY = "API Key"

DEFAULT_SCAN_INTERVAL_SECONDS = 30

# Jail control services. An empty schema means the service takes no extra fields.
SERVICE_JAIL_START = "jail_start"
SCHEMA_SERVICE_JAIL_START = {}

SERVICE_JAIL_STOP = "jail_stop"
SCHEMA_SERVICE_JAIL_STOP = {
    vol.Optional("force"): cv.boolean,  # force-stop instead of graceful stop
}

SERVICE_JAIL_RESTART = "jail_restart"
SCHEMA_SERVICE_JAIL_RESTART = {}

# Virtual-machine control services.
SERVICE_VM_START = "vm_start"
SCHEMA_SERVICE_VM_START = {
    vol.Optional("overcommit"): cv.boolean,  # allow memory overcommit on start
}

SERVICE_VM_STOP = "vm_stop"
SCHEMA_SERVICE_VM_STOP = {
    vol.Optional("force"): cv.boolean,  # force power-off instead of shutdown
}

SERVICE_VM_RESTART = "vm_restart"
SCHEMA_SERVICE_VM_RESTART = {}
from openpeerpower.helpers.discovery import async_load_platform from .const import CONF_FUEL_TYPES, CONF_STATIONS, DOMAIN, FUEL_TYPES _LOGGER = logging.getLogger(__name__) DEFAULT_RADIUS = 2 DEFAULT_SCAN_INTERVAL = timedelta(minutes=30) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema({ vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL): cv.time_period, vol.Optional(CONF_FUEL_TYPES, default=FUEL_TYPES): vol.All(cv.ensure_list, [vol.In(FUEL_TYPES)]), vol.Inclusive( CONF_LATITUDE, "coordinates", "Latitude and longitude must exist together", ): cv.latitude, vol.Inclusive( CONF_LONGITUDE, "coordinates", "Latitude and longitude must exist together", ): cv.longitude,
KEY_CONFIG = 'tradfri_configuring' KEY_GATEWAY = 'tradfri_gateway' KEY_API = 'tradfri_api' KEY_TRADFRI_GROUPS = 'tradfri_allow_tradfri_groups' CONF_ALLOW_TRADFRI_GROUPS = 'allow_tradfri_groups' DEFAULT_ALLOW_TRADFRI_GROUPS = True CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema({ vol.Inclusive(CONF_HOST, 'gateway'): cv.string, vol.Inclusive(CONF_API_KEY, 'gateway'): cv.string, vol.Optional(CONF_ALLOW_TRADFRI_GROUPS, default=DEFAULT_ALLOW_TRADFRI_GROUPS): cv.boolean, }) }, extra=vol.ALLOW_EXTRA) _LOGGER = logging.getLogger(__name__) def request_configuration(hass, config, host): """Request configuration steps from the user.""" configurator = hass.components.configurator hass.data.setdefault(KEY_CONFIG, {}) instance = hass.data[KEY_CONFIG].get(host) # Configuration already in progress
ICONS = { 'U-Bahn': 'mdi:subway', 'Tram': 'mdi:tram', 'Bus': 'mdi:bus', 'ExpressBus': 'mdi:bus', 'S-Bahn': 'mdi:train', 'SEV': 'mdi:checkbox-blank-circle-outline', '-': 'mdi:clock' } ATTRIBUTION = "Data provided by MVG-live.de" SCAN_INTERVAL = timedelta(seconds=30) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_NEXT_DEPARTURE): [{ vol.Required(CONF_STATION): cv.string, vol.Optional(CONF_DESTINATIONS, default=['']): cv.ensure_list_csv, vol.Optional(CONF_DIRECTIONS, default=['']): cv.ensure_list_csv, vol.Optional(CONF_LINES, default=['']): cv.ensure_list_csv, vol.Optional(CONF_PRODUCTS, default=DEFAULT_PRODUCT): cv.ensure_list_csv, vol.Optional(CONF_TIMEOFFSET, default=0): cv.positive_int, vol.Optional(CONF_NUMBER, default=1): cv.positive_int, vol.Optional(CONF_NAME):
traces = [] for trace in hass.data[DATA_TRACE].get(key, {}).values(): traces.append(trace.as_short_dict()) return traces @callback @websocket_api.require_admin @websocket_api.websocket_command({ vol.Required("type"): "trace/list", vol.Required("domain", "id"): vol.In(TRACE_DOMAINS), vol.Optional("item_id", "id"): str, }) def websocket_trace_list(hass, connection, msg): """Summarize script and automation traces.""" domain = msg["domain"] key = (domain, msg["item_id"]) if "item_id" in msg else None if not key: traces = [] for key in hass.data[DATA_TRACE]: if key[0] == domain: traces.extend(get_debug_traces(hass, key)) else: traces = get_debug_traces(hass, key)
CONF_SCAN_INTERVAL, ) from openpeerpower.helpers import config_validation as cv from .const import ( CONF_MINIMUM_MAGNITUDE, CONF_MMI, DEFAULT_MINIMUM_MAGNITUDE, DEFAULT_MMI, DEFAULT_RADIUS, DEFAULT_SCAN_INTERVAL, DOMAIN, ) DATA_SCHEMA = vol.Schema({ vol.Optional(CONF_MMI, default=DEFAULT_MMI): vol.All(vol.Coerce(int), vol.Range(min=-1, max=8)), vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): cv.positive_int, }) _LOGGER = logging.getLogger(__name__) class GeonetnzQuakesFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a GeoNet NZ Quakes config flow.""" async def _show_form(self, errors=None): """Show the form to the user.""" return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA, errors=errors or {})
} SSD1306_MODEL = vol.All(vol.Replace(' ', '_'), cv.one_of(*MODELS, upper=True)) PLATFORM_SCHEMA = display.FULL_DISPLAY_PLATFORM_SCHEMA.extend({ cv.GenerateID(): cv.declare_variable_id(SPISSD1306), cv.GenerateID(CONF_SPI_ID): cv.use_variable_id(SPIComponent), vol.Required(CONF_CS_PIN): pins.gpio_output_pin_schema, vol.Required(CONF_DC_PIN): pins.gpio_output_pin_schema, vol.Required(CONF_MODEL): SSD1306_MODEL, vol.Optional(CONF_RESET_PIN): pins.gpio_output_pin_schema, vol.Optional(CONF_EXTERNAL_VCC): cv.boolean, }).extend(cv.COMPONENT_SCHEMA.schema) def to_code(config): for spi_ in get_variable(config[CONF_SPI_ID]): yield for cs in gpio_output_pin_expression(config[CONF_CS_PIN]): yield for dc in gpio_output_pin_expression(config[CONF_DC_PIN]): yield rhs = App.make_spi_ssd1306(spi_, cs, dc)
ICONS = { 'BTC': 'mdi:currency-btc', 'EUR': 'mdi:currency-eur', 'GBP': 'mdi:currency-gbp', 'INR': 'mdi:currency-inr', 'RUB': 'mdi:currency-rub', 'TRY': 'mdi:currency-try', 'USD': 'mdi:currency-usd', } SCAN_INTERVAL = timedelta(minutes=5) SYMBOL_SCHEMA = vol.Schema({ vol.Required(CONF_SYMBOL): cv.string, vol.Optional(CONF_CURRENCY): cv.string, vol.Optional(CONF_NAME): cv.string, }) CURRENCY_SCHEMA = vol.Schema({ vol.Required(CONF_FROM): cv.string, vol.Required(CONF_TO): cv.string, vol.Optional(CONF_NAME): cv.string, }) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_FOREIGN_EXCHANGE): vol.All(cv.ensure_list, [CURRENCY_SCHEMA]), vol.Optional(CONF_SYMBOLS):
BOOT_AUTO, BOOT_MANUAL, CONTENT_TYPE_BINARY, CONTENT_TYPE_PNG, CONTENT_TYPE_TEXT, REQUEST_FROM, ) from ..coresys import CoreSysAttributes from ..exceptions import APIError from ..validate import ALSA_DEVICE, DOCKER_PORTS from .utils import api_process, api_process_raw, api_validate _LOGGER = logging.getLogger(__name__) SCHEMA_VERSION = vol.Schema({ vol.Optional(ATTR_VERSION): vol.Coerce(str), }) # pylint: disable=no-value-for-parameter SCHEMA_OPTIONS = vol.Schema({ vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]), vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS), vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(), vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE, vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE, vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(), }) # pylint: disable=no-value-for-parameter SCHEMA_SECURITY = vol.Schema({ vol.Optional(ATTR_PROTECTED): vol.Boolean(),
# https://home-assistant.io/developers/creating_components/ # Import the device class from the component that you want to support # Home Assistant depends on 3rd party packages for API specific code. REQUIREMENTS = ['pyduofern==0.25.2'] _LOGGER = logging.getLogger(__name__) from .const import DOMAIN, DUOFERN_COMPONENTS, CONF_SERIAL_PORT, CONF_CODE # Validation of the user's configuration CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema({ vol.Optional('serial_port', default="/dev/serial/by-id/usb-Rademacher_DuoFern_USB-Stick_WR04ZFP4-if00-port0"): cv.string, vol.Optional('config_file', default=os.path.join(os.path.dirname(__file__), "../../duofern.json")): cv.string, # config file: default to homeassistant config directory (assuming this is a custom component) vol.Optional('code', default="0000"): cv.string, }), }, extra=vol.ALLOW_EXTRA) PAIRING_SCHEMA = vol.Schema({ vol.Optional('timeout', default=30): cv.positive_int, })
from .handler import HassIO
from .http import HassIOView

_LOGGER = logging.getLogger(__name__)

DOMAIN = 'hassio'
DEPENDENCIES = ['http']  # the supervisor API view needs the HTTP component

# Key/version for data persisted via the storage helper.
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1

# Developer option: path to a local frontend repository checkout.
CONF_FRONTEND_REPO = 'development_repo'

CONFIG_SCHEMA = vol.Schema(
    {
        vol.Optional(DOMAIN): vol.Schema({
            vol.Optional(CONF_FRONTEND_REPO): cv.isdir,
        }),
    },
    extra=vol.ALLOW_EXTRA)

# hass.data key caching the Home Assistant version reported by the supervisor.
DATA_HOMEASSISTANT_VERSION = 'hassio_hass_version'

# How often to refresh data from the supervisor.
HASSIO_UPDATE_INTERVAL = timedelta(minutes=55)

# Service names exposed by this component.
SERVICE_ADDON_START = 'addon_start'
SERVICE_ADDON_STOP = 'addon_stop'
SERVICE_ADDON_RESTART = 'addon_restart'
SERVICE_ADDON_STDIN = 'addon_stdin'
SERVICE_HOST_SHUTDOWN = 'host_shutdown'
SERVICE_HOST_REBOOT = 'host_reboot'
from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA) from homeassistant.const import CONF_NAME import homeassistant.helpers.config_validation as cv DEPENDENCIES = ['arduino2'] _LOGGER = logging.getLogger(__name__) CONF_PINS = 'pins' CONF_TYPE = 'digital' CONF_NEGATE = 'negate' CONF_INITIAL = 'initial' PIN_SCHEMA = vol.Schema({ vol.Required(CONF_NAME): cv.string, vol.Optional(CONF_INITIAL, default=False): cv.boolean, vol.Optional(CONF_NEGATE, default=False): cv.boolean, }) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_PINS, default={}): vol.Schema({cv.positive_int: PIN_SCHEMA}), }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Arduino platform.""" # Verify that Arduino board is present if arduino2.BOARD is None: _LOGGER.error("A connection has not been made to the Arduino board") return False
"Specify '{}', '{}' and '{}' when '{}' is unspecified".format( CONF_ID, CONF_DELTA, CONF_FRAMES, CONF_LOCATION)) return config LOCATIONS_MSG = "Set '{}' to one of: {}".format(CONF_LOCATION, ", ".join(sorted(LOCATIONS))) XOR_MSG = f"Specify exactly one of '{CONF_ID}' or '{CONF_LOCATION}'" PLATFORM_SCHEMA = vol.All( PLATFORM_SCHEMA.extend({ vol.Exclusive(CONF_ID, "xor", msg=XOR_MSG): cv.string, vol.Exclusive(CONF_LOCATION, "xor", msg=XOR_MSG): vol.In(LOCATIONS, msg=LOCATIONS_MSG), vol.Optional(CONF_DELTA): cv.positive_int, vol.Optional(CONF_FRAMES): cv.positive_int, vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_OUTFILE): cv.string, }), _validate_schema, ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up BOM radar-loop camera component.""" location = config.get(CONF_LOCATION) or "ID {}".format(config.get(CONF_ID))
# DHT11 is able to deliver data once per second, DHT22 once every two MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=30) SENSOR_TEMPERATURE = 'temperature' SENSOR_HUMIDITY = 'humidity' SENSOR_TYPES = { SENSOR_TEMPERATURE: ['Temperature', None], SENSOR_HUMIDITY: ['Humidity', '%'] } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_SENSOR): cv.string, vol.Required(CONF_PIN): cv.string, vol.Optional(CONF_MONITORED_CONDITIONS, default=[]): vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, }) def setup_platform(hass, config, add_devices, discovery_info=None): """Setup the DHT sensor.""" # pylint: disable=import-error import Adafruit_DHT SENSOR_TYPES[SENSOR_TEMPERATURE][1] = hass.config.units.temperature_unit available_sensors = { "DHT11": Adafruit_DHT.DHT11, "DHT22": Adafruit_DHT.DHT22,
ATTR_DEVICE_ID = 'device_id' ATTR_DISTANCE = 'distance' ATTR_ROOM = 'room' CONF_DEVICE_ID = 'device_id' CONF_AWAY_TIMEOUT = 'away_timeout' DEFAULT_AWAY_TIMEOUT = 0 DEFAULT_NAME = 'Room Sensor' DEFAULT_TIMEOUT = 5 DEFAULT_TOPIC = 'room_presence' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_DEVICE_ID): cv.string, vol.Required(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int, vol.Optional(CONF_AWAY_TIMEOUT, default=DEFAULT_AWAY_TIMEOUT): cv.positive_int, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string }).extend(mqtt.MQTT_RO_PLATFORM_SCHEMA.schema) MQTT_PAYLOAD = vol.Schema(vol.All(json.loads, vol.Schema({ vol.Required(ATTR_ID): cv.string, vol.Required(ATTR_DISTANCE): vol.Coerce(float), }, extra=vol.ALLOW_EXTRA))) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up MQTT room Sensor.""" async_add_entities([MQTTRoomSensor( config.get(CONF_NAME), config.get(CONF_STATE_TOPIC),
def __init__(self):
    """Initialize the config flow.

    Only builds the (ordered) voluptuous schemas for every step of the
    Alexa Media login flow; no I/O happens here.
    """
    # Log startup banner only once, before any account data exists.
    if self.hass and not self.hass.data.get(DATA_ALEXAMEDIA):
        _LOGGER.info(STARTUP)
        _LOGGER.info("Loaded alexapy==%s", alexapy_version)
    self.login = None
    self.securitycode: Optional[Text] = None
    # Counts automatic retry submissions so the flow can stop after a few.
    self.automatic_steps: int = 0
    self.config = OrderedDict()
    self.proxy_schema = None
    # Main credentials form (legacy, non-proxy login path).
    self.data_schema = OrderedDict(
        [
            (vol.Optional(CONF_PROXY, default=False), bool),
            (vol.Required(CONF_EMAIL), str),
            (vol.Required(CONF_PASSWORD), str),
            (vol.Required(CONF_URL, default="amazon.com"), str),
            (vol.Optional(CONF_SECURITYCODE), str),
            (vol.Optional(CONF_OTPSECRET), str),
            (vol.Optional(CONF_DEBUG, default=False), bool),
            (vol.Optional(CONF_INCLUDE_DEVICES, default=""), str),
            (vol.Optional(CONF_EXCLUDE_DEVICES, default=""), str),
            (vol.Optional(CONF_SCAN_INTERVAL, default=60), int),
            (vol.Optional(CONF_COOKIES_TXT, default=""), str),
            (vol.Optional(CONF_OAUTH_LOGIN, default=True), bool),
        ]
    )
    # Captcha challenge: password + optional security code + captcha text.
    self.captcha_schema = OrderedDict(
        [
            (vol.Optional(CONF_PROXY, default=False), bool),
            (vol.Required(CONF_PASSWORD), str),
            (
                vol.Optional(
                    CONF_SECURITYCODE,
                    default=self.securitycode if self.securitycode else "",
                ),
                str,
            ),
            (vol.Required("captcha"), str),
        ]
    )
    # Two-factor security-code entry form.
    self.twofactor_schema = OrderedDict(
        [
            (vol.Optional(CONF_PROXY, default=False), bool),
            (
                vol.Required(
                    CONF_SECURITYCODE,
                    default=self.securitycode if self.securitycode else "",
                ),
                str,
            ),
        ]
    )
    # Picker for the verification method (index into offered claim options).
    self.claimspicker_schema = OrderedDict(
        [
            (vol.Optional(CONF_PROXY, default=False), bool),
            (
                vol.Required("claimsoption", default=0),
                vol.All(cv.positive_int, vol.Clamp(min=0)),
            ),
        ]
    )
    # Picker for the OTP delivery method (index into offered auth options).
    self.authselect_schema = OrderedDict(
        [
            (vol.Optional(CONF_PROXY, default=False), bool),
            (
                vol.Required("authselectoption", default=0),
                vol.All(cv.positive_int, vol.Clamp(min=0)),
            ),
        ]
    )
    # Entry form for the verification code sent to the selected device.
    self.verificationcode_schema = OrderedDict(
        [
            (vol.Optional(CONF_PROXY, default=False), bool),
            (vol.Required("verificationcode"), str),
        ]
    )
    # Confirmation checkbox shown after generating a built-in TOTP key.
    self.totp_register = OrderedDict(
        [(vol.Optional(CONF_TOTP_REGISTER, default=False), bool)]
    )
DEFAULT_NAME = "NMBS" DEFAULT_ICON = "mdi:train" DEFAULT_ICON_ALERT = "mdi:alert-octagon" CONF_STATION_FROM = "station_from" CONF_STATION_TO = "station_to" CONF_STATION_LIVE = "station_live" CONF_EXCLUDE_VIAS = "exclude_vias" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_STATION_FROM): cv.string, vol.Required(CONF_STATION_TO): cv.string, vol.Optional(CONF_STATION_LIVE): cv.string, vol.Optional(CONF_EXCLUDE_VIAS, default=False): cv.boolean, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean, }) def get_time_until(departure_time=None): """Calculate the time between now and a train's departure time.""" if departure_time is None: return 0
async def async_step_user(self, user_input=None): """Provide a proxy for login.""" self._save_user_input_to_config(user_input=user_input) self.proxy_schema = OrderedDict( [ ( vol.Required(CONF_EMAIL, default=self.config.get(CONF_EMAIL, "")), str, ), ( vol.Required( CONF_PASSWORD, default=self.config.get(CONF_PASSWORD, "") ), str, ), ( vol.Required( CONF_URL, default=self.config.get(CONF_URL, "amazon.com") ), str, ), ( vol.Required( CONF_HASS_URL, default=self.config.get( CONF_HASS_URL, get_url(self.hass, prefer_external=True) ), ), str, ), ( vol.Optional( CONF_OTPSECRET, default=self.config.get(CONF_OTPSECRET, "") ), str, ), ( vol.Optional( CONF_DEBUG, default=self.config.get(CONF_DEBUG, False) ), bool, ), ( vol.Optional( CONF_INCLUDE_DEVICES, default=self.config.get(CONF_INCLUDE_DEVICES, ""), ), str, ), ( vol.Optional( CONF_EXCLUDE_DEVICES, default=self.config.get(CONF_EXCLUDE_DEVICES, ""), ), str, ), ( vol.Optional( CONF_SCAN_INTERVAL, default=self.config.get(CONF_SCAN_INTERVAL, 60), ), int, ), ( vol.Optional(CONF_PROXY, default=self.config.get(CONF_PROXY, True)), bool, ), ( vol.Optional( CONF_OAUTH_LOGIN, default=self.config.get(CONF_OAUTH_LOGIN, True), ), bool, ), ] ) if not user_input: return self.async_show_form( step_id="user", data_schema=vol.Schema(self.proxy_schema), description_placeholders={"message": ""}, ) if user_input and not user_input.get(CONF_PROXY): return self.async_show_form( step_id="user_legacy", data_schema=vol.Schema(self._update_schema_defaults()), description_placeholders={"message": ""}, ) if self.login is None: try: self.login = self.hass.data[DATA_ALEXAMEDIA]["accounts"][ self.config[CONF_EMAIL] ].get("login_obj") except KeyError: self.login = None try: if not self.login or self.login.session.closed: _LOGGER.debug("Creating new login") uuid_dict = await calculate_uuid( self.hass, self.config.get(CONF_EMAIL), self.config[CONF_URL] ) uuid = uuid_dict["uuid"] self.login = AlexaLogin( url=self.config[CONF_URL], 
email=self.config.get(CONF_EMAIL, ""), password=self.config.get(CONF_PASSWORD, ""), outputpath=self.hass.config.path, debug=self.config[CONF_DEBUG], otp_secret=self.config.get(CONF_OTPSECRET, ""), uuid=uuid, oauth_login=self.config.get(CONF_OAUTH_LOGIN, True), ) else: _LOGGER.debug("Using existing login") if self.config.get(CONF_EMAIL): self.login.email = self.config.get(CONF_EMAIL) if self.config.get(CONF_PASSWORD): self.login.password = self.config.get(CONF_PASSWORD) if self.config.get(CONF_OTPSECRET): self.login.set_totp(self.config.get(CONF_OTPSECRET, "")) except AlexapyPyotpInvalidKey: return self.async_show_form( step_id="user", errors={"base": "2fa_key_invalid"}, description_placeholders={"message": ""}, ) hass_url: Text = user_input.get(CONF_HASS_URL) hass_url_valid: bool = False async with ClientSession() as session: try: async with session.get(hass_url) as resp: hass_url_valid = resp.status == 200 except ClientConnectionError: hass_url_valid = False if not hass_url_valid: _LOGGER.debug( "Unable to connect to provided Home Assistant url: %s", hass_url ) return self.async_show_form( step_id="user", errors={"base": "hass_url_invalid"}, description_placeholders={"message": ""}, ) if not self.proxy: self.proxy = AlexaProxy( self.login, str(URL(hass_url).with_path(AUTH_PROXY_PATH)) ) # Swap the login object self.proxy.change_login(self.login) if ( user_input and user_input.get(CONF_OTPSECRET) and user_input.get(CONF_OTPSECRET).replace(" ", "") ): otp: Text = self.login.get_totp_token() if otp: _LOGGER.debug("Generating OTP from %s", otp) return self.async_show_form( step_id="totp_register", data_schema=vol.Schema(self.totp_register), errors={}, description_placeholders={ "email": self.login.email, "url": self.login.url, "message": otp, }, ) return await self.async_step_start_proxy(user_input)
ATTR_HS_COLOR, ATTR_WHITE, PLATFORM_SCHEMA, ColorMode, LightEntity, ) from homeassistant.const import CONF_DEVICES, CONF_NAME from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.color as color_util _LOGGER = logging.getLogger(__name__) DEVICE_SCHEMA = vol.Schema({vol.Optional(CONF_NAME): cv.string}) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( {vol.Optional(CONF_DEVICES, default={}): { cv.string: DEVICE_SCHEMA }}) def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Zengge platform.""" lights = []
async def _test_login(self): # pylint: disable=too-many-statements, too-many-return-statements login = self.login email = login.email _LOGGER.debug("Testing login status: %s", login.status) if login.status and login.status.get("login_successful"): existing_entry = await self.async_set_unique_id(f"{email} - {login.url}") if self.config.get("reauth"): self.config.pop("reauth") if self.config.get(CONF_SECURITYCODE): self.config.pop(CONF_SECURITYCODE) if self.config.get(CONF_PROXY): self.config.pop(CONF_PROXY) if self.config.get("hass_url"): self.config.pop("hass_url") self.config[CONF_OAUTH] = { "access_token": login.access_token, "refresh_token": login.refresh_token, "expires_in": login.expires_in, } self.hass.data.setdefault( DATA_ALEXAMEDIA, {"accounts": {}, "config_flows": {}}, ) if existing_entry: self.hass.config_entries.async_update_entry( existing_entry, data=self.config ) _LOGGER.debug("Reauth successful for %s", hide_email(email)) self.hass.bus.async_fire( "alexa_media_relogin_success", event_data={"email": hide_email(email), "url": login.url}, ) self.hass.components.persistent_notification.async_dismiss( f"alexa_media_{slugify(email)}{slugify(login.url[7:])}" ) self.hass.data[DATA_ALEXAMEDIA]["accounts"][self.config[CONF_EMAIL]][ "login_obj" ] = self.login self.hass.data[DATA_ALEXAMEDIA]["config_flows"][ f"{email} - {login.url}" ] = None return self.async_abort(reason="reauth_successful") _LOGGER.debug( "Setting up Alexa devices with %s", dict(obfuscate(self.config)) ) self._abort_if_unique_id_configured(self.config) return self.async_create_entry( title=f"{login.email} - {login.url}", data=self.config ) if login.status and login.status.get("captcha_required"): new_schema = self._update_ord_dict( self.captcha_schema, { vol.Required( CONF_PASSWORD, default=self.config[CONF_PASSWORD] ): str, vol.Optional( CONF_SECURITYCODE, default=self.securitycode if self.securitycode else "", ): str, }, ) _LOGGER.debug("Creating config_flow to request captcha") 
self.automatic_steps = 0 return self.async_show_form( step_id="captcha", data_schema=vol.Schema(new_schema), errors={}, description_placeholders={ "email": login.email, "url": login.url, "captcha_image": "[![captcha]({0})]({0})".format( login.status["captcha_image_url"] ), "message": f" \n> {login.status.get('error_message','')}", }, ) if login.status and login.status.get("securitycode_required"): _LOGGER.debug( "Creating config_flow to request 2FA. Saved security code %s", self.securitycode, ) generated_securitycode: Text = login.get_totp_token() if ( self.securitycode or generated_securitycode ) and self.automatic_steps < 2: if self.securitycode: _LOGGER.debug( "Automatically submitting securitycode %s", self.securitycode ) else: _LOGGER.debug( "Automatically submitting generated securitycode %s", generated_securitycode, ) self.automatic_steps += 1 await sleep(5) if generated_securitycode: return await self.async_step_twofactor( user_input={CONF_SECURITYCODE: generated_securitycode} ) return await self.async_step_twofactor( user_input={CONF_SECURITYCODE: self.securitycode} ) self.twofactor_schema = OrderedDict( [ (vol.Optional(CONF_PROXY, default=False), bool), ( vol.Required( CONF_SECURITYCODE, default=self.securitycode if self.securitycode else "", ), str, ), ] ) self.automatic_steps = 0 return self.async_show_form( step_id="twofactor", data_schema=vol.Schema(self.twofactor_schema), errors={}, description_placeholders={ "email": login.email, "url": login.url, "message": f" \n> {login.status.get('error_message','')}", }, ) if login.status and login.status.get("claimspicker_required"): error_message = f" \n> {login.status.get('error_message', '')}" _LOGGER.debug("Creating config_flow to select verification method") claimspicker_message = login.status["claimspicker_message"] self.automatic_steps = 0 return self.async_show_form( step_id="claimspicker", data_schema=vol.Schema(self.claimspicker_schema), errors={}, description_placeholders={ "email": login.email, 
"url": login.url, "message": " \n> {} \n> {}".format( claimspicker_message, error_message ), }, ) if login.status and login.status.get("authselect_required"): _LOGGER.debug("Creating config_flow to select OTA method") error_message = login.status.get("error_message", "") authselect_message = login.status["authselect_message"] self.automatic_steps = 0 return self.async_show_form( step_id="authselect", data_schema=vol.Schema(self.authselect_schema), description_placeholders={ "email": login.email, "url": login.url, "message": " \n> {} \n> {}".format( authselect_message, error_message ), }, ) if login.status and login.status.get("verificationcode_required"): _LOGGER.debug("Creating config_flow to enter verification code") self.automatic_steps = 0 return self.async_show_form( step_id="verificationcode", data_schema=vol.Schema(self.verificationcode_schema), ) if ( login.status and login.status.get("force_get") and not login.status.get("ap_error_href") ): _LOGGER.debug("Creating config_flow to wait for user action") self.automatic_steps = 0 return self.async_show_form( step_id="action_required", data_schema=vol.Schema( OrderedDict([(vol.Optional(CONF_PROXY, default=False), bool)]) ), description_placeholders={ "email": login.email, "url": login.url, "message": f" \n>{login.status.get('message','')} \n", }, ) if login.status and (login.status.get("login_failed")): if login.oauth_login: _LOGGER.debug("Trying non-oauth login") await login.reset() login.oauth_login = False await login.login() return await self._test_login() _LOGGER.debug("Login failed: %s", login.status.get("login_failed")) await login.close() self.hass.components.persistent_notification.async_dismiss( f"alexa_media_{slugify(email)}{slugify(login.url[7:])}" ) return self.async_abort(reason="login_failed") new_schema = self._update_schema_defaults() if login.status and login.status.get("error_message"): _LOGGER.debug("Login error detected: %s", login.status.get("error_message")) if ( 
login.status.get("error_message") in { "There was a problem\n Enter a valid email or mobile number\n " } and self.automatic_steps < 2 ): _LOGGER.debug( "Trying automatic resubmission %s for error_message 'valid email'", self.automatic_steps, ) self.automatic_steps += 1 await sleep(5) return await self.async_step_user_legacy(user_input=self.config) _LOGGER.debug( "Done with automatic resubmission for error_message 'valid email'; returning error message", ) self.automatic_steps = 0 return self.async_show_form( step_id="user_legacy", data_schema=vol.Schema(new_schema), description_placeholders={ "message": f" \n> {login.status.get('error_message','')}" }, ) self.automatic_steps = 0 return self.async_show_form( step_id="user_legacy", data_schema=vol.Schema(new_schema), description_placeholders={ "message": f" \n> {login.status.get('error_message','')}" }, )
# hass.data keys used by the frontend component.
DATA_JS_VERSION = 'frontend_js_version'
DATA_EXTRA_HTML_URL = 'frontend_extra_html_url'
DATA_EXTRA_HTML_URL_ES5 = 'frontend_extra_html_url_es5'
DATA_THEMES = 'frontend_themes'
DATA_DEFAULT_THEME = 'frontend_default_theme'

DEFAULT_THEME = 'default'
PRIMARY_COLOR = 'primary-color'  # CSS variable name themes may override

_LOGGER = logging.getLogger(__name__)

CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema({
            # Developer option: local frontend repository checkout.
            vol.Optional(CONF_FRONTEND_REPO): cv.isdir,
            # Mapping of theme name -> {css variable: value}.
            vol.Optional(CONF_THEMES): vol.Schema({cv.string: {
                cv.string: cv.string
            }}),
            # Extra HTML resources to load (modern and ES5 builds).
            vol.Optional(CONF_EXTRA_HTML_URL):
                vol.All(cv.ensure_list, [cv.string]),
            vol.Optional(CONF_EXTRA_HTML_URL_ES5):
                vol.All(cv.ensure_list, [cv.string]),
            # Which JS build to serve; must be one of JS_OPTIONS.
            vol.Optional(CONF_JS_VERSION, default=JS_DEFAULT_OPTION):
                vol.In(JS_OPTIONS)
        }),
    }, extra=vol.ALLOW_EXTRA)
_LOGGER = logging.getLogger(__name__) CONF_RATE = "rate" DEFAULT_HOST = "localhost" DEFAULT_PORT = 8125 DEFAULT_PREFIX = "hass" DEFAULT_RATE = 1 DOMAIN = "datadog" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema({ vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_PREFIX, default=DEFAULT_PREFIX): cv.string, vol.Optional(CONF_RATE, default=DEFAULT_RATE): vol.All(vol.Coerce(int), vol.Range(min=1)), }) }, extra=vol.ALLOW_EXTRA, ) def setup(hass, config): """Set up the Datadog component.""" conf = config[DOMAIN]
CONF_SECURE_DEVICES_PIN, CONF_SERVICE_ACCOUNT, DEFAULT_EXPOSE_BY_DEFAULT, DEFAULT_EXPOSED_DOMAINS, DOMAIN, SERVICE_REQUEST_SYNC, ) from .const import EVENT_QUERY_RECEIVED # noqa: F401 from .http import GoogleAssistantView, GoogleConfig from .const import EVENT_COMMAND_RECEIVED, EVENT_SYNC_RECEIVED # noqa: F401, isort:skip _LOGGER = logging.getLogger(__name__) ENTITY_SCHEMA = vol.Schema({ vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_EXPOSE): cv.boolean, vol.Optional(CONF_ALIASES): vol.All(cv.ensure_list, [cv.string]), vol.Optional(CONF_ROOM_HINT): cv.string, }) GOOGLE_SERVICE_ACCOUNT = vol.Schema( { vol.Required(CONF_PRIVATE_KEY): cv.string, vol.Required(CONF_CLIENT_EMAIL): cv.string, }, extra=vol.ALLOW_EXTRA,
_LOGGER = logging.getLogger(__name__)

# Operation-mode display names reported by the controller.
STATE_FAN = 'Fan'
STATE_HEAT = 'Heat'
STATE_COOL = 'Cool'
STATE_DRY = 'Dry'
STATE_AUTO = 'Auto'

# Fan-speed display names.
STATE_QUIET = 'Quiet'
STATE_LOW = 'Low'
STATE_MEDIUM = 'Medium'
STATE_HIGH = 'High'
STATE_OFF = 'Off'

SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_OPERATION_MODE

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    # Poll interval in seconds; must be at least 1.
    vol.Optional(CONF_SCAN_INTERVAL):
        vol.All(vol.Coerce(int), vol.Range(min=1)),
})

# Return cached results if last scan time was less than this value.
# If a persistent connection is established for the controller, changes to
# values are in realtime.
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=180)

try:
    from asyncio import ensure_future
except ImportError:
    # Python 3.4.3 and earlier name this scheduler "async".
    # BUGFIX: `async` became a reserved keyword in Python 3.7, so the original
    # `from asyncio import async` / `ensure_future = async` was a SyntaxError
    # on any modern interpreter; fetch the attribute via getattr instead.
    # pylint: disable=unused-import
    import asyncio
    ensure_future = getattr(asyncio, "async")
CONF_NAME, ) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.script import Script _LOGGER = logging.getLogger(__name__) CONF_OFF_ACTION = "turn_off" DEFAULT_NAME = "Wake on LAN" DEFAULT_PING_TIMEOUT = 1 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_MAC): cv.string, vol.Optional(CONF_BROADCAST_ADDRESS): cv.string, vol.Optional(CONF_BROADCAST_PORT): cv.port, vol.Optional(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_OFF_ACTION): cv.SCRIPT_SCHEMA, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up a wake on lan switch.""" broadcast_address = config.get(CONF_BROADCAST_ADDRESS)
PLATFORM_SCHEMA, BaseNotificationService, ) _LOGGER = logging.getLogger(__name__) ATTR_ENCRYPTED = "encrypted" CONF_DEVICE_KEY = "device_key" CONF_EVENT = "event" CONF_SALT = "salt" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_DEVICE_KEY): cv.string, vol.Optional(CONF_EVENT): cv.string, vol.Inclusive(CONF_PASSWORD, ATTR_ENCRYPTED): cv.string, vol.Inclusive(CONF_SALT, ATTR_ENCRYPTED): cv.string, }) def get_service(hass, config, discovery_info=None): """Get the Simplepush notification service.""" return SimplePushNotificationService(config) class SimplePushNotificationService(BaseNotificationService): """Implementation of the notification service for Simplepush."""
ATTR_NEW_IP = 'new_ip' ATTR_LAST_AUTHENTICATE_TIME = 'last_authenticated_time' ATTR_PREVIOUS_AUTHENTICATE_TIME = 'previous_authenticated_time' ATTR_USER = "******" SCAN_INTERVAL = timedelta(minutes=1) PLATFORM_NAME = 'authenticated' LOGFILE = 'home-assistant.log' OUTFILE = '.ip_authenticated.yaml' PROVIDERS = ['ipapi', 'extreme', 'ipvigilante'] PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_PROVIDER, default='ipapi'): vol.In(PROVIDERS), vol.Optional(CONF_LOG_LOCATION, default=''): cv.string, vol.Optional(CONF_NOTIFY, default=True): cv.boolean, vol.Optional(CONF_EXCLUDE, default='None'): vol.All(cv.ensure_list, [cv.string]), }) def setup_platform(hass, config, add_devices, discovery_info=None): """Create the sensor""" notify = config.get(CONF_NOTIFY) exclude = config.get(CONF_EXCLUDE) hass.data[PLATFORM_NAME] = {} if not load_authentications(hass.config.path(".storage/auth")): return False
URL_GSE_CLOUD,
)

_LOGGER = logging.getLogger(__name__)

CONF_CODE_ARM_REQUIRED = "code_arm_required"
DOMAIN = "gigasetelements"

CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema({
            vol.Required(CONF_USERNAME): cv.string,
            vol.Required(CONF_PASSWORD): cv.string,
            vol.Optional(CONF_NAME, default="gigaset_elements"): cv.string,
            vol.Optional(CONF_SWITCHES, default=True): cv.boolean,
            vol.Optional(CONF_CODE_ARM_REQUIRED, default=True): cv.boolean,
            # second positional arg is presumably the marker's error message
            # ("msg" parameter of vol.Optional) — TODO confirm intent
            vol.Optional(CONF_CODE, "code validation"): cv.string,
        }),
    },
    extra=vol.ALLOW_EXTRA,
)

# Retry transient cloud errors with exponential backoff.
# NOTE(review): urllib3 >= 1.26 deprecates `method_whitelist` in favour of
# `allowed_methods` — confirm the urllib3 version this ships against.
retry_strategy = Retry(
    total=5,
    backoff_factor=2,
    status_forcelist=[429, 500, 502, 503, 504],
    method_whitelist=["DELETE", "GET", "POST"],
)