ACCEPTED_VALUES = [ "no", "analog", "yes", "out", "out-sub", "sub", "hdbaset", "both", "up", ] ONKYO_SELECT_OUTPUT_SCHEMA = vol.Schema({ vol.Required(ATTR_ENTITY_ID): cv.entity_ids, vol.Required(ATTR_HDMI_OUTPUT): vol.In(ACCEPTED_VALUES), }) SERVICE_SELECT_HDMI_OUTPUT = "onkyo_select_hdmi_output" def determine_zones(receiver): """Determine what zones are available for the receiver.""" out = {"zone2": False, "zone3": False} try: _LOGGER.debug("Checking for zone 2 capability") receiver.raw("ZPWQSTN") out["zone2"] = True except ValueError as error: if str(error) != TIMEOUT_MESSAGE: raise error
def enum(enumClass):
    """Build a voluptuous validator for *enumClass*.

    The returned validator accepts only the names of the enum's members and
    converts a valid name into the corresponding enum member.
    """
    # First validate the raw value against the member names, then map the
    # (now known-good) name to the actual enum member.
    name_is_member = vol.In(enumClass.__members__)
    name_to_member = enumClass.__getitem__
    return vol.All(name_is_member, name_to_member)
class Wilson(ConfigurableClass):
    """Main interface to the wilson package, providing automatic running
    and matching in SMEFT and WET.

    Caching is used for intermediate results.

    Methods:

    - `from_wc`: Return a `Wilson` instance initialized by a `wcxf.WC` instance
    - `load_wc`: Return a `Wilson` instance initialized by a WCxf file-like object
    - `match_run`: Run the Wilson coefficients to a different scale
      (and possibly different EFT) and return them as `wcxf.WC` instance
    - `set_option`: Set configuration option
    - `get_option`: Show configuration option
    - `set_default_option`: Class method! Set default configuration option
      affecting only future instances of the class.
    """

    # default config options:
    # dictionary with option name as 'key' and default option value as 'value'
    _default_options = {
        'smeft_accuracy': 'integrate',
        'qed_order': 1,
        'qcd_order': 1,
        'smeft_matching_order': 0,
        'smeft_matchingscale': 91.1876,
        'mb_matchingscale': 4.2,
        'mc_matchingscale': 1.3,
        'parameters': {},
    }

    # option schema:
    # Voluptuous schema defining allowed option values/types
    _option_schema = vol.Schema({
        'smeft_accuracy': vol.In(['integrate', 'leadinglog']),
        'qed_order': vol.In([0, 1]),
        'qcd_order': vol.In([0, 1]),
        'smeft_matching_order': vol.In([0, 1]),
        'smeft_matchingscale': vol.Coerce(float),
        'mb_matchingscale': vol.Coerce(float),
        'mc_matchingscale': vol.Coerce(float),
        'parameters': vol.Schema({vol.Extra: vol.Coerce(float)}),
    })

    def __init__(self, wcdict, scale, eft, basis):
        """Initialize the `Wilson` class.

        Parameters:

        - `wcdict`: dictionary of Wilson coefficient values at the input
          scale. The keys must exist as Wilson coefficients in the WCxf
          basis file. The values must be real or complex numbers (not
          dictionaries with key 'Re'/'Im'!)
        - `scale`: input scale in GeV
        - `eft`: input EFT
        - `basis`: input basis
        """
        super().__init__()
        self.wc = wcxf.WC(eft=eft, basis=basis, scale=scale,
                          values=wcxf.WC.dict2values(wcdict))
        self.wc.validate()
        # cache for `match_run` results, laid out as
        # _cache[eft][scale][basis][sector] -> wcxf.WC
        self._cache = {}

    def __hash__(self):
        """Return a hash of the `Wilson` instance.

        The hash changes when Wilson coefficient values or options are
        modified. It assumes that `wcxf.WC` instances are not modified
        after instantiation."""
        # NOTE(review): frozenset(self._options) hashes only the option
        # *names* (dict iteration yields keys), so changing an option's
        # value without adding/removing a key may not change the hash —
        # confirm against the intent stated in the docstring.
        return hash((self.wc, frozenset(self._options)))

    @classmethod
    def from_wc(cls, wc):
        """Return a `Wilson` instance initialized by a `wcxf.WC` instance"""
        return cls(wcdict=wc.dict, scale=wc.scale, eft=wc.eft, basis=wc.basis)

    @classmethod
    def load_wc(cls, stream):
        """Return a `Wilson` instance initialized by a WCxf file-like object"""
        wc = wcxf.WC.load(stream)
        return cls.from_wc(wc)

    def _repr_html_(self):
        """Return an HTML representation for Jupyter notebooks."""
        r_wcxf = self.wc._repr_html_()
        r_wcxf = '\n'.join(r_wcxf.splitlines()[2:])  # remove WCxf heading
        html = "<h3>Wilson coefficients</h3>\n\n"
        html += r_wcxf
        return html

    @property
    def parameters(self):
        """Parameters to be used for running and translation."""
        # start with a copy of the default parameters
        p = parameters.p.copy()
        # overwrite by the user defined parameters, if any
        p.update(self.get_option('parameters'))
        return p

    @property
    def matching_parameters(self):
        """Parameters to be used for the SMEFT->WET matching."""
        # start with a copy of the numerical parameters
        p = self.parameters.copy()
        # properly set 'loop_order' for `match.smeft.match_all`
        p['loop_order'] = self.get_option('smeft_matching_order')
        return p

    def _wetrun_opt(self):
        """Return a dictionary of options to pass to a `run.wet.WETrunner`
        instance."""
        return {
            'qed_order': self.get_option('qed_order'),
            'qcd_order': self.get_option('qcd_order')
        }

    def match_run(self, scale, eft, basis, sectors='all'):
        """Run the Wilson coefficients to a different scale
        (and possibly different EFT) and return them as `wcxf.WC` instance.

        Parameters:

        - `scale`: output scale in GeV
        - `eft`: output EFT
        - `basis`: output basis
        - `sectors`: in the case of WET (or WET-4 or WET-3), a tuple of
          sector names can be optionally provided. In this case, only the
          Wilson coefficients from this sector(s) will be returned and all
          others discarded. This can speed up the computation significantly
          if only a small number of sectors is of interest. The sector names
          are defined in the WCxf basis file.
        """
        cached = self._get_from_cache(sector=sectors, scale=scale,
                                      eft=eft, basis=basis)
        if cached is not None:
            return cached
        if sectors == 'all':
            # the default value for sectors is "None" for translators
            translate_sectors = None
        else:
            translate_sectors = sectors
        scale_ew = self.get_option('smeft_matchingscale')
        mb = self.get_option('mb_matchingscale')
        mc = self.get_option('mc_matchingscale')
        if self.wc.basis == basis and self.wc.eft == eft \
                and scale == self.wc.scale:
            return self.wc  # nothing to do
        if self.wc.eft == eft and scale == self.wc.scale:
            # only translation necessary
            wc_out = self.wc.translate(basis, sectors=translate_sectors,
                                       parameters=self.parameters)
            self._set_cache(sectors, scale, eft, basis, wc_out)
            return wc_out
        if self.wc.eft == 'SMEFT':
            smeft_accuracy = self.get_option('smeft_accuracy')
            if eft == 'SMEFT':
                smeft = SMEFT(self.wc.translate(
                    'Warsaw', sectors=translate_sectors,
                    parameters=self.parameters))
                # if input and output EFT is SMEFT, just run.
                wc_out = smeft.run(scale,
                                   accuracy=smeft_accuracy).translate(basis)
                self._set_cache('all', scale, 'SMEFT', wc_out.basis, wc_out)
                return wc_out
            else:
                # if SMEFT -> WET-x: match to WET at the EW scale
                wc_ew = self._get_from_cache(sector='all', scale=scale_ew,
                                             eft='WET', basis='JMS')
                if wc_ew is None:
                    if self.wc.scale == scale_ew:
                        # no need to run
                        wc_ew = self.wc.match(
                            'WET', 'JMS',
                            parameters=self.matching_parameters)
                    else:
                        smeft = SMEFT(self.wc.translate(
                            'Warsaw', parameters=self.parameters))
                        wc_ew = smeft.run(
                            scale_ew, accuracy=smeft_accuracy).match(
                                'WET', 'JMS',
                                parameters=self.matching_parameters)
                    self._set_cache('all', scale_ew, wc_ew.eft, wc_ew.basis,
                                    wc_ew)
                wet = WETrunner(wc_ew, **self._wetrun_opt())
        elif self.wc.eft in ['WET', 'WET-4', 'WET-3']:
            wet = WETrunner(
                self.wc.translate('JMS', parameters=self.parameters,
                                  sectors=translate_sectors),
                **self._wetrun_opt())
        else:
            raise ValueError("Input EFT {} unknown or not supported".format(
                self.wc.eft))
        if eft == wet.eft:  # just run
            wc_out = wet.run(scale, sectors=sectors).translate(
                basis, sectors=translate_sectors, parameters=self.parameters)
            self._set_cache(sectors, scale, eft, basis, wc_out)
            return wc_out
        elif eft == 'WET-4' and wet.eft == 'WET':  # match at mb
            wc_mb = wet.run(mb, sectors=sectors).match(
                'WET-4', 'JMS', parameters=self.matching_parameters)
            wet4 = WETrunner(wc_mb, **self._wetrun_opt())
            wc_out = wet4.run(scale, sectors=sectors).translate(
                basis, sectors=translate_sectors, parameters=self.parameters)
            self._set_cache(sectors, scale, 'WET-4', basis, wc_out)
            return wc_out
        elif eft == 'WET-3' and wet.eft == 'WET-4':  # match at mc
            wc_mc = wet.run(mc, sectors=sectors).match(
                'WET-3', 'JMS', parameters=self.matching_parameters)
            wet3 = WETrunner(wc_mc, **self._wetrun_opt())
            wc_out = wet3.run(scale, sectors=sectors).translate(
                basis, sectors=translate_sectors, parameters=self.parameters)
            # Fix: cache the result *before* returning. Previously this
            # statement came after the `return` and was unreachable, so
            # WET-4 -> WET-3 results were never cached.
            self._set_cache(sectors, scale, 'WET-3', basis, wc_out)
            return wc_out
        elif eft == 'WET-3' and wet.eft == 'WET':  # match at mb and mc
            wc_mb = wet.run(mb, sectors=sectors).match(
                'WET-4', 'JMS', parameters=self.matching_parameters)
            wet4 = WETrunner(wc_mb, **self._wetrun_opt())
            wc_mc = wet4.run(mc, sectors=sectors).match(
                'WET-3', 'JMS', parameters=self.matching_parameters)
            wet3 = WETrunner(wc_mc, **self._wetrun_opt())
            wc_out = wet3.run(scale, sectors=sectors).translate(
                basis, sectors=translate_sectors, parameters=self.parameters)
            self._set_cache(sectors, scale, 'WET-3', basis, wc_out)
            return wc_out
        else:
            raise ValueError("Running from {} to {} not implemented".format(
                wet.eft, eft))

    def clear_cache(self):
        """Drop all cached `match_run` results."""
        self._cache = {}

    def _get_from_cache(self, sector, scale, eft, basis):
        """Try to load a set of Wilson coefficients from the cache,
        else return None."""
        try:
            return self._cache[eft][scale][basis][sector]
        except KeyError:
            return None

    def _set_cache(self, sector, scale, eft, basis, wc_out):
        """Store a `match_run` result under its (eft, scale, basis, sector)
        key, creating intermediate levels as needed."""
        # setdefault chain replaces the previous four-way if/elif nesting;
        # behavior is identical.
        self._cache.setdefault(eft, {}).setdefault(scale, {}) \
            .setdefault(basis, {})[sector] = wc_out
_DEFAULT_TT = 0.2 _ATTR_PRESET = "preset" _ATTR_COLOR_BW = "color_bw" _CBW_COLOR = "color" _CBW_AUTO = "auto" _CBW_BW = "bw" _CBW = [_CBW_COLOR, _CBW_AUTO, _CBW_BW] _SRV_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids}) _SRV_GOTO_SCHEMA = _SRV_SCHEMA.extend( {vol.Required(_ATTR_PRESET): vol.All(vol.Coerce(int), vol.Range(min=1))}) _SRV_CBW_SCHEMA = _SRV_SCHEMA.extend( {vol.Required(_ATTR_COLOR_BW): vol.In(_CBW)}) _SRV_PTZ_SCHEMA = _SRV_SCHEMA.extend({ vol.Required(_ATTR_PTZ_MOV): vol.In(_MOV), vol.Optional(_ATTR_PTZ_TT, default=_DEFAULT_TT): cv.small_float, }) CAMERA_SERVICES = { _SRV_EN_REC: (_SRV_SCHEMA, "async_enable_recording", ()), _SRV_DS_REC: (_SRV_SCHEMA, "async_disable_recording", ()), _SRV_EN_AUD: (_SRV_SCHEMA, "async_enable_audio", ()), _SRV_DS_AUD: (_SRV_SCHEMA, "async_disable_audio", ()), _SRV_EN_MOT_REC: (_SRV_SCHEMA, "async_enable_motion_recording", ()), _SRV_DS_MOT_REC: (_SRV_SCHEMA, "async_disable_motion_recording", ()), _SRV_GOTO: (_SRV_GOTO_SCHEMA, "async_goto_preset", (_ATTR_PRESET, )),
connection.send_result(msg[ID]) @websocket_api.require_admin @websocket_api.websocket_command( { vol.Required(TYPE): "zwave_js/update_log_config", vol.Required(ENTRY_ID): str, vol.Required(CONFIG): vol.All( vol.Schema( { vol.Optional(ENABLED): cv.boolean, vol.Optional(LEVEL): vol.All( cv.string, vol.Lower, vol.In([log_level.value for log_level in LogLevel]), lambda val: LogLevel(val), # pylint: disable=unnecessary-lambda ), vol.Optional(LOG_TO_FILE): cv.boolean, vol.Optional(FILENAME): cv.string, vol.Optional(FORCE_CONSOLE): cv.boolean, } ), cv.has_at_least_one_key( ENABLED, FILENAME, FORCE_CONSOLE, LEVEL, LOG_TO_FILE ), filename_is_present_if_logging_to_file, ), }, ) @websocket_api.async_response
["stats", "lvl"]), "gp": ST("Gold", "mdi:coin", "Gold", ["stats", "gp"]), "class": ST("Class", "mdi:sword", "", ["stats", "class"]), } INSTANCE_SCHEMA = vol.Schema({ vol.Optional(CONF_URL, default=DEFAULT_URL): cv.url, vol.Optional(CONF_NAME): cv.string, vol.Required(CONF_API_USER): cv.string, vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_SENSORS, default=list(SENSORS_TYPES)): vol.All(cv.ensure_list, vol.Unique(), [vol.In(list(SENSORS_TYPES))]), }) has_unique_values = vol.Schema(vol.Unique()) # pylint: disable=invalid-name # because we want a handy alias def has_all_unique_users(value): """Validate that all API users are unique.""" api_users = [user[CONF_API_USER] for user in value] has_unique_values(api_users) return value def has_all_unique_users_names(value): """Validate that all user's names are unique and set if any is set."""
['Volume', ['chime', 'doorbell', 'stickup_cams'], None, 'bell-ring', None], 'wifi_signal_category': [ 'WiFi Signal Category', ['chime', 'doorbell', 'stickup_cams'], None, 'wifi', None ], 'wifi_signal_strength': [ 'WiFi Signal Strength', ['chime', 'doorbell', 'stickup_cams'], 'dBm', 'wifi', None ], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_ENTITY_NAMESPACE, default=DEFAULT_ENTITY_NAMESPACE): cv.string, vol.Required(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)): vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]), }) def setup_platform(hass, config, add_devices, discovery_info=None): """Set up a sensor for a Ring device.""" ring = hass.data[DATA_RING] sensors = [] for sensor_type in config.get(CONF_MONITORED_CONDITIONS): for device in ring.chimes: if 'chime' in SENSOR_TYPES[sensor_type][1]: sensors.append(RingSensor(hass, device, sensor_type)) for device in ring.doorbells: if 'doorbell' in SENSOR_TYPES[sensor_type][1]:
ATTR_PLATE = 'plate' ATTR_PLATES = 'plates' ATTR_VEHICLES = 'vehicles' OPENALPR_REGIONS = [ 'au', 'auwide', 'br', 'eu', 'fr', 'gb', 'kr', 'kr2', 'mx', 'sg', 'us', 'vn2' ] CONF_ALPR_BIN = 'alp_bin' DEFAULT_BINARY = 'alpr' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_REGION): vol.All(vol.Lower, vol.In(OPENALPR_REGIONS)), vol.Optional(CONF_ALPR_BIN, default=DEFAULT_BINARY): cv.string, }) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the OpenALPR local platform.""" command = [config[CONF_ALPR_BIN], '-c', config[CONF_REGION], '-'] confidence = config[CONF_CONFIDENCE] entities = [] for camera in config[CONF_SOURCE]:
"precipitation_forecast_total": [ "Precipitation forecast total", "mm", "mdi:weather-pouring", ], "next_rain_forecast": ["Next rain forecast", "minutes", "mdi:weather-pouring"], "precipitation_forecast_intensity": ["Precipitation forecast intensity", None, "mdi:weather-pouring"], } CONF_TIMEFRAME = "timeframe" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_MONITORED_CONDITIONS, default=["precipitation"]): vol.All(cv.ensure_list, vol.Length(min=1), [vol.In(SENSOR_TYPES.keys())]), vol.Inclusive(CONF_LATITUDE, "coordinates", "Latitude and longitude must exist together"): cv.latitude, vol.Inclusive(CONF_LONGITUDE, "coordinates", "Latitude and longitude must exist together"): cv.longitude, vol.Optional(CONF_TIMEFRAME, default=60): vol.All(vol.Coerce(int), vol.Range(min=5, max=120)), vol.Optional(CONF_NAME, default="ba"): cv.string, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up Buienalarm.""" latitude = config.get(CONF_LATITUDE, hass.config.latitude)
ATTR_TRANSITION: VALID_TRANSITION, ATTR_BRIGHTNESS: VALID_BRIGHTNESS, ATTR_BRIGHTNESS_PCT: VALID_BRIGHTNESS_PCT, vol.Exclusive(ATTR_COLOR_NAME, COLOR_GROUP): cv.string, vol.Exclusive(ATTR_RGB_COLOR, COLOR_GROUP): vol.All(vol.ExactSequence((cv.byte, cv.byte, cv.byte)), vol.Coerce(tuple)), vol.Exclusive(ATTR_XY_COLOR, COLOR_GROUP): vol.All(vol.ExactSequence((cv.small_float, cv.small_float)), vol.Coerce(tuple)), vol.Exclusive(ATTR_COLOR_TEMP, COLOR_GROUP): vol.All(vol.Coerce(int), vol.Range(min=1)), vol.Exclusive(ATTR_KELVIN, COLOR_GROUP): vol.All(vol.Coerce(int), vol.Range(min=0)), ATTR_WHITE_VALUE: vol.All(vol.Coerce(int), vol.Range(min=0, max=255)), ATTR_FLASH: vol.In([FLASH_SHORT, FLASH_LONG]), ATTR_EFFECT: cv.string, }) LIGHT_TURN_OFF_SCHEMA = vol.Schema({ ATTR_ENTITY_ID: cv.entity_ids, ATTR_TRANSITION: VALID_TRANSITION, ATTR_FLASH: vol.In([FLASH_SHORT, FLASH_LONG]), }) LIGHT_TOGGLE_SCHEMA = vol.Schema({ ATTR_ENTITY_ID: cv.entity_ids, ATTR_TRANSITION: VALID_TRANSITION, }) PROFILE_SCHEMA = vol.Schema(
DOMAIN, DATA_COORDINATOR, DATA_CAM, ) _LOGGER = logging.getLogger(__name__) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5) CONF_FRAMES = "frames" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_STATION): cv.string, vol.Optional(CONF_STYLE, default="Standard"): vol.In(STYLES), vol.Optional(CONF_FRAMES): cv.positive_int, vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_TYPE, default="NCR"): vol.In(RADAR_TYPES.values()), }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up NWS radar-loop camera component.""" _LOGGER.warning( "YAML configuration deprecated. It will be removed in nwsradar v0.6.0") station = config[CONF_STATION].upper() style = config.get(CONF_STYLE) or "Standard"
("partlycloudy", ["A few clouds", "Partly cloudy"]), ]) ERRORS = (aiohttp.ClientError, JSONDecodeError) FORECAST_MODE = ["daynight", "hourly"] PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_NAME): cv.string, vol.Inclusive(CONF_LATITUDE, "coordinates", "Latitude and longitude must exist together"): cv.latitude, vol.Inclusive(CONF_LONGITUDE, "coordinates", "Latitude and longitude must exist together"): cv.longitude, vol.Optional(CONF_MODE, default="daynight"): vol.In(FORECAST_MODE), vol.Optional(CONF_STATION): cv.string, vol.Required(CONF_API_KEY): cv.string, }) def convert_condition(time, weather): """ Convert NWS codes to HA condition. Choose first condition in CONDITION_CLASSES that exists in weather code. If no match is found, return first condition from NWS """ conditions = [w[0] for w in weather]
CONF_BETA = "beta" CONF_IMAGE = "image" DEFAULT_IMAGE = "default" DEFAULT_NAME_LATEST = "Latest Version" DEFAULT_NAME_LOCAL = "Current Version" DEFAULT_SOURCE = "local" TIME_BETWEEN_UPDATES = timedelta(minutes=5) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_BETA, default=False): cv.boolean, vol.Optional(CONF_IMAGE, default=DEFAULT_IMAGE): vol.In(ALL_IMAGES), vol.Optional(CONF_NAME, default=""): cv.string, vol.Optional(CONF_SOURCE, default=DEFAULT_SOURCE): vol.In(ALL_SOURCES), }) _LOGGER: logging.Logger = logging.getLogger(__name__) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Version sensor platform."""
"" #defining sensor types: 'key' : ['name', 'units'] SENSOR_TYPES = { 'temperature' : ['Temperature', '°C'], 'pressure' : ['Pressure', 'hPa'], 'humidity_level': ['Humidity', '%'], 'co2_level' : ['CO2', 'ppm' ], 'battery_level' : ['Battery', '%'] } #platfor schema PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_MAC) : cv.string, vol.Required(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSOR_TYPES) : vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]), vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT) : cv.positive_int, vol.Optional(CONF_CACHE, default=DEFAULT_UPDATE_INTERVAL) : cv.positive_int, vol.Optional(CONF_ADAPTER, default=DEFAULT_ADAPTER) : cv.string, vol.Optional(CONF_SAMPLE_RATE, default = 10) : cv.positive_int }) def setup_platform(hass, config, add_devices, discovery_info = None): """ SetUp CO2 sensor""" from CO2 import sensor_co2 sample_rate = config.get(CONF_SAMPLE_RATE) cache = config.get(CONF_CACHE)
CONF_TYPE: CONF_IS_OPEN }, { CONF_TYPE: CONF_IS_NOT_OPEN }], DEVICE_CLASS_NONE: [{ CONF_TYPE: CONF_IS_ON }, { CONF_TYPE: CONF_IS_OFF }], } CONDITION_SCHEMA = cv.DEVICE_CONDITION_BASE_SCHEMA.extend({ vol.Required(CONF_ENTITY_ID): cv.entity_id, vol.Required(CONF_TYPE): vol.In(IS_OFF + IS_ON), vol.Optional(CONF_FOR): cv.positive_time_period_dict, }) async def async_get_conditions(hass: HomeAssistant, device_id: str) -> List[Dict[str, str]]: """List device conditions.""" conditions: List[Dict[str, str]] = [] entity_registry = await async_get_registry(hass) entries = [ entry for entry in async_entries_for_device(entity_registry, device_id) if entry.domain == DOMAIN ]
# Attribute key used in zone-changed payloads.
ATTR_STATE = 'state'

# By default no zones are configured.
DEFAULT_ZONES = []

# Dispatcher signal names for zone and arming-state updates.
SIGNAL_ZONE_CHANGED = 'ness_alarm.zone_changed'
SIGNAL_ARMING_STATE_CHANGED = 'ness_alarm.arming_state_changed'

# Payload carried by SIGNAL_ZONE_CHANGED: which zone, and its new state.
ZoneChangedData = namedtuple('ZoneChangedData', ['zone_id', 'state'])

# Device class assumed for a zone when the config omits one.
DEFAULT_ZONE_TYPE = 'motion'

# Schema for a single zone entry: a name, a positive integer id, and an
# optional type restricted to the known binary-sensor device classes.
ZONE_SCHEMA = vol.Schema({
    vol.Required(CONF_ZONE_NAME): cv.string,
    vol.Required(CONF_ZONE_ID): cv.positive_int,
    vol.Optional(CONF_ZONE_TYPE, default=DEFAULT_ZONE_TYPE):
        vol.In(DEVICE_CLASSES)})

# Top-level configuration: host and port of the alarm panel plus an
# optional list of zones, each validated against ZONE_SCHEMA.
# extra=vol.ALLOW_EXTRA lets unrelated top-level config keys pass through.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema({
            vol.Required(CONF_DEVICE_HOST): cv.string,
            vol.Required(CONF_DEVICE_PORT): cv.port,
            vol.Optional(CONF_ZONES, default=DEFAULT_ZONES):
                vol.All(cv.ensure_list, [ZONE_SCHEMA]),
        }),
    }, extra=vol.ALLOW_EXTRA)
vol.Optional(CONF_FILTER, default=dict): entityfilter.FILTER_SCHEMA, }) ALEXA_SCHEMA = ASSISTANT_SCHEMA.extend({ vol.Optional(CONF_ENTITY_CONFIG): {cv.entity_id: ALEXA_ENTITY_SCHEMA} }) GACTIONS_SCHEMA = ASSISTANT_SCHEMA.extend({ vol.Optional(CONF_ENTITY_CONFIG): {cv.entity_id: GOOGLE_ENTITY_SCHEMA}, }) # pylint: disable=no-value-for-parameter CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ vol.Optional(CONF_MODE, default=DEFAULT_MODE): vol.In([MODE_DEV, MODE_PROD]), vol.Optional(CONF_COGNITO_CLIENT_ID): str, vol.Optional(CONF_USER_POOL_ID): str, vol.Optional(CONF_REGION): str, vol.Optional(CONF_RELAYER): str, vol.Optional(CONF_GOOGLE_ACTIONS_SYNC_URL): vol.Url(), vol.Optional(CONF_SUBSCRIPTION_INFO_URL): vol.Url(), vol.Optional(CONF_CLOUDHOOK_CREATE_URL): vol.Url(), vol.Optional(CONF_REMOTE_API_URL): vol.Url(), vol.Optional(CONF_ACME_DIRECTORY_SERVER): vol.Url(), vol.Optional(CONF_ALEXA): ALEXA_SCHEMA, vol.Optional(CONF_GOOGLE_ACTIONS): GACTIONS_SCHEMA, vol.Optional(CONF_ALEXA_ACCESS_TOKEN_URL): str, }), }, extra=vol.ALLOW_EXTRA)
CONF_STREAM_PATH = "stream_path" DEFAULT_CAMERA_BRAND = "VIVOTEK" DEFAULT_NAME = "VIVOTEK Camera" DEFAULT_EVENT_0_KEY = "event_i0_enable" DEFAULT_SECURITY_LEVEL = "admin" DEFAULT_STREAM_SOURCE = "live.sdp" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_IP_ADDRESS): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_AUTHENTICATION, default=HTTP_BASIC_AUTHENTICATION): vol.In( [HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION] ), vol.Optional(CONF_SSL, default=False): cv.boolean, vol.Optional(CONF_VERIFY_SSL, default=True): cv.boolean, vol.Optional(CONF_FRAMERATE, default=2): cv.positive_int, vol.Optional(CONF_SECURITY_LEVEL, default=DEFAULT_SECURITY_LEVEL): cv.string, vol.Optional(CONF_STREAM_PATH, default=DEFAULT_STREAM_SOURCE): cv.string, } ) def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None,
# All recognized battery states, grouped for easy membership checks.
BATTERY_STATES = [
    ATTR_BATTERY_STATE_UNPLUGGED,
    ATTR_BATTERY_STATE_CHARGING,
    ATTR_BATTERY_STATE_FULL,
    ATTR_BATTERY_STATE_UNKNOWN,
]

ATTR_DEVICES = 'devices'

# Schema for a single push-notification action. The identifier is
# normalized to upper case via vol.Upper; activation mode and behavior are
# restricted to their known value sets and default to background /
# default behavior respectively. extra=vol.ALLOW_EXTRA tolerates
# additional keys on an action entry.
ACTION_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_PUSH_ACTIONS_IDENTIFIER): vol.Upper,
        vol.Required(CONF_PUSH_ACTIONS_TITLE): cv.string,
        vol.Optional(CONF_PUSH_ACTIONS_ACTIVATION_MODE,
                     default=ATTR_BACKGROUND): vol.In(ACTIVATION_MODES),
        vol.Optional(CONF_PUSH_ACTIONS_AUTHENTICATION_REQUIRED,
                     default=False): cv.boolean,
        vol.Optional(CONF_PUSH_ACTIONS_DESTRUCTIVE,
                     default=False): cv.boolean,
        vol.Optional(CONF_PUSH_ACTIONS_BEHAVIOR,
                     default=ATTR_DEFAULT_BEHAVIOR): vol.In(BEHAVIORS),
        vol.Optional(CONF_PUSH_ACTIONS_TEXT_INPUT_BUTTON_TITLE): cv.string,
        vol.Optional(CONF_PUSH_ACTIONS_TEXT_INPUT_PLACEHOLDER): cv.string,
    }, extra=vol.ALLOW_EXTRA)

# A list of actions: a single action is wrapped into a list first, then
# each entry is validated against ACTION_SCHEMA.
ACTION_SCHEMA_LIST = vol.All(cv.ensure_list, [ACTION_SCHEMA])
'request_in_progress' ] CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_REGION, default=DEFAULT_REGION): cv.string, vol.Optional(CONF_MUTABLE, default=True): cv.boolean, vol.Optional(CONF_SPIN, default=''): cv.string, vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_UPDATE_INTERVAL): ( vol.All(cv.time_period, vol.Clamp(min=MIN_UPDATE_INTERVAL))), vol.Optional(CONF_NAME, default={}): vol.Schema( {cv.slug: cv.string}), vol.Optional(CONF_RESOURCES): vol.All( cv.ensure_list, [vol.In(RESOURCES)]) }), }, extra=vol.ALLOW_EXTRA) def setup(hass, config): """Setup Volkswagen Carnet component""" interval = config[DOMAIN].get(CONF_SCAN_INTERVAL) data = hass.data[DATA_KEY] = VolkswagenData(config) from volkswagencarnet import Connection _LOGGER.debug("Creating connection to volkswagen carnet") connection = Connection( username = config[DOMAIN].get(CONF_USERNAME), password = config[DOMAIN].get(CONF_PASSWORD), )
from homeassistant.helpers.typing import HomeAssistantType from .const import ( CONF_MOUNT_DIR, CONF_TYPE_OWFS, CONF_TYPE_OWSERVER, CONF_TYPE_SYSBUS, DEFAULT_OWSERVER_HOST, DEFAULT_OWSERVER_PORT, DEFAULT_SYSBUS_MOUNT_DIR, DOMAIN, ) from .onewirehub import CannotConnect, InvalidPath, OneWireHub DATA_SCHEMA_USER = vol.Schema( {vol.Required(CONF_TYPE): vol.In([CONF_TYPE_OWSERVER, CONF_TYPE_SYSBUS])} ) DATA_SCHEMA_OWSERVER = vol.Schema( { vol.Required(CONF_HOST, default=DEFAULT_OWSERVER_HOST): str, vol.Required(CONF_PORT, default=DEFAULT_OWSERVER_PORT): int, } ) DATA_SCHEMA_MOUNTDIR = vol.Schema( { vol.Required(CONF_MOUNT_DIR, default=DEFAULT_SYSBUS_MOUNT_DIR): str, } ) async def validate_input_owserver(hass: HomeAssistantType, data):
vol.Exclusive(CONF_PORT, "plm_or_hub", msg=CONF_PLM_HUB_MSG): cv.string, vol.Exclusive(CONF_HOST, "plm_or_hub", msg=CONF_PLM_HUB_MSG): cv.string, vol.Optional(CONF_IP_PORT): cv.port, vol.Optional(CONF_HUB_USERNAME): cv.string, vol.Optional(CONF_HUB_PASSWORD): cv.string, vol.Optional(CONF_HUB_VERSION, default=2): vol.In([1, 2]), vol.Optional(CONF_OVERRIDE): vol.All(cv.ensure_list_csv, [CONF_DEVICE_OVERRIDE_SCHEMA]), vol.Optional(CONF_X10): vol.All(cv.ensure_list_csv, [CONF_X10_SCHEMA]), }, extra=vol.ALLOW_EXTRA, required=True, ), cv.has_at_least_one_key(CONF_PORT, CONF_HOST), set_default_port, ) }, extra=vol.ALLOW_EXTRA, )
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_DHCP_SOFTWARE = "dhcp_software" DEFAULT_DHCP_SOFTWARE = "dnsmasq" DHCP_SOFTWARES = ["dnsmasq", "odhcpd", "none"] PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_DHCP_SOFTWARE, default=DEFAULT_DHCP_SOFTWARE): vol.In( DHCP_SOFTWARES ), } ) def get_scanner(hass, config): """Validate the configuration and return an ubus scanner.""" dhcp_sw = config[DOMAIN][CONF_DHCP_SOFTWARE] if dhcp_sw == "dnsmasq": scanner = DnsmasqUbusDeviceScanner(config[DOMAIN]) elif dhcp_sw == "odhcpd": scanner = OdhcpdUbusDeviceScanner(config[DOMAIN]) else: scanner = UbusDeviceScanner(config[DOMAIN])
def build_remove_override_schema(data):
    """Build the schema to remove device overrides in config flow options.

    Parameters:
    - `data`: iterable of override mappings, each containing the
      CONF_ADDRESS key.

    Returns a voluptuous schema whose required CONF_ADDRESS field accepts
    only an address that is currently overridden.
    """
    # Collect the existing override addresses; vol.In restricts the user's
    # choice to exactly this set. (Comprehension replaces manual
    # append-in-loop.)
    selection = [override[CONF_ADDRESS] for override in data]
    return vol.Schema({vol.Required(CONF_ADDRESS): vol.In(selection)})
if date_val is None: raise vol.Invalid("Invalid datetime specified: {}".format(value)) return date_val def time_zone(value): """Validate timezone.""" if dt_util.get_time_zone(value) is not None: return value raise vol.Invalid( "Invalid time zone passed in. Valid options can be found here: " "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") weekdays = vol.All(ensure_list, [vol.In(WEEKDAYS)]) def socket_timeout(value): """Validate timeout float > 0.0. None coerced to socket._GLOBAL_DEFAULT_TIMEOUT bare object. """ if value is None: return _GLOBAL_DEFAULT_TIMEOUT try: float_value = float(value) if float_value > 0.0: return float_value raise vol.Invalid("Invalid socket timeout value." " float > 0.0 required.")
SCAN_INTERVAL = timedelta(minutes=5) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_TRAINS): [{ vol.Required(CONF_NAME): cv.string, vol.Required(CONF_TO): cv.string, vol.Required(CONF_FROM): cv.string, vol.Optional(CONF_TIME): cv.time, vol.Optional(CONF_WEEKDAY, default=WEEKDAYS): vol.All(cv.ensure_list, [vol.In(WEEKDAYS)]), }], }) async def async_setup_platform( hass: HomeAssistant, config: ConfigType, async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the departure sensor.""" httpsession = async_get_clientsession(hass) train_api = TrafikverketTrain(httpsession, config[CONF_API_KEY]) sensors = [] station_cache = {}
# mypy: allow-untyped-defs TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend({ vol.Required(CONF_PLATFORM): mqtt.DOMAIN, vol.Required(CONF_TOPIC): mqtt.util.valid_subscribe_topic_template, vol.Optional(CONF_PAYLOAD): cv.template, vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): cv.string, vol.Optional(CONF_QOS, default=DEFAULT_QOS): vol.All(vol.Coerce(int), vol.In([0, 1, 2])), }) _LOGGER = logging.getLogger(__name__) async def async_attach_trigger( hass: HomeAssistant, config: ConfigType, action: TriggerActionType, trigger_info: TriggerInfo, ) -> CALLBACK_TYPE: """Listen for state changes based on configuration.""" trigger_data = trigger_info["trigger_data"] topic = config[CONF_TOPIC] wanted_payload = config.get(CONF_PAYLOAD)
device_class=SensorDeviceClass.HUMIDITY, native_unit_of_measurement=PERCENTAGE, value_fn=lambda sensor: ( round(val) # pylint: disable=undefined-variable if (val := sensor.humidity) else None), ), ) SENSOR_KEYS: list[str] = [desc.key for desc in SENSOR_TYPES] MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=10) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS): vol.All(vol.Coerce(int), vol.Range(min=0x44, max=0x45)), vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSOR_KEYS): vol.All(cv.ensure_list, [vol.In(SENSOR_KEYS)]), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, }) def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the sensor platform.""" _LOGGER.warning( "The Sensirion SHT31 integration is deprecated and will be removed " "in Home Assistant Core 2022.4; this integration is removed under "
REQUIREMENTS = ['python-miio>=0.5.0'] SUPPORT_FLAGS = (SUPPORT_TARGET_TEMPERATURE) SERVICE_SET_ROOM_TEMP = 'miheater_set_room_temperature' PRECISION = 1 MIN_TEMP = 18 MIN_TEMP_ZB1 = 16 MAX_TEMP = 28 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Required(CONF_NAME): cv.string, vol.Required(CONF_TOKEN): cv.string, vol.Optional(CONF_MODEL, default=None): vol.In(['zhimi.heater.mc2', 'zhimi.heater.zb1', 'zhimi.heater.za2', None]), }) SET_ROOM_TEMP_SCHEMA = vol.Schema( {vol.Optional('temperature'): cv.positive_int}) DEVICE_MODEL = "" ATTR_MODEL = 'model' def setup_platform(hass, config, add_devices, discovery_info=None): """Perform the setup for Xiaomi heaters.""" host = config.get(CONF_HOST) name = config.get(CONF_NAME) token = config.get(CONF_TOKEN) model = config.get(CONF_MODEL)
"TR", "TK", "UA", "UZ", "VU", "CY", "SN", "JI", "YI", ] PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_PWS_ID): cv.string, vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.All(vol.In(LANG_CODES)), vol.Inclusive( CONF_LATITUDE, "coordinates", "Latitude and longitude must exist together" ): cv.latitude, vol.Inclusive( CONF_LONGITUDE, "coordinates", "Latitude and longitude must exist together" ): cv.longitude, vol.Required(CONF_MONITORED_CONDITIONS): vol.All( cv.ensure_list, vol.Length(min=1), [vol.In(SENSOR_TYPES)] ), } ) async def async_setup_platform( hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None