if negative_offset: offset *= -1 return offset def time_period_seconds(value: Union[int, str]) -> timedelta: """Validate and transform seconds to a time offset.""" try: return timedelta(seconds=int(value)) except (ValueError, TypeError): raise vol.Invalid('Expected seconds, got {}'.format(value)) time_period = vol.Any(time_period_str, time_period_seconds, timedelta, time_period_dict) def match_all(value): """Validate that matches all values.""" return value def platform_validator(domain): """Validate if platform exists for given domain.""" def validator(value): """Test if platform exists.""" if value is None: raise vol.Invalid('platform cannot be None') if get_platform(domain, str(value)): return value
cv.deprecated(CONF_TLS_VERSION), vol.Schema({ vol.Optional(CONF_CLIENT_ID): cv.string, vol.Optional(CONF_KEEPALIVE, default=DEFAULT_KEEPALIVE): vol.All(vol.Coerce(int), vol.Range(min=15)), vol.Optional(CONF_BROKER): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_USERNAME): cv.string, vol.Optional(CONF_PASSWORD): cv.string, vol.Optional(CONF_CERTIFICATE): vol.Any("auto", cv.isfile), vol.Inclusive(CONF_CLIENT_KEY, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG): cv.isfile, vol.Inclusive(CONF_CLIENT_CERT, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG): cv.isfile, vol.Optional(CONF_TLS_INSECURE): cv.boolean, vol.Optional(CONF_TLS_VERSION, default=DEFAULT_TLS_PROTOCOL): vol.Any("auto", "1.0", "1.1", "1.2"), vol.Optional(CONF_PROTOCOL, default=DEFAULT_PROTOCOL): vol.All(cv.string, vol.In([PROTOCOL_31, PROTOCOL_311])), vol.Optional(CONF_WILL_MESSAGE, default=DEFAULT_WILL):
async def webhook_update_sensor_states(hass, config_entry, data):
    """Handle an update sensor states webhook.

    Validates each payload entry against the full sensor schema, rejects
    updates for sensors that were never registered, and dispatches valid
    updates over the sensor-update signal.

    :param hass: Home Assistant core object.
    :param config_entry: Config entry holding the device name and webhook id.
    :param data: Iterable of sensor-state dicts sent by the device.
    :return: A webhook response mapping each unique id to a success flag
        or an error dict.
    """
    sensor_schema_full = vol.Schema(
        {
            vol.Optional(ATTR_SENSOR_ATTRIBUTES, default={}): dict,
            vol.Optional(ATTR_SENSOR_ICON, default="mdi:cellphone"): cv.icon,
            vol.Required(ATTR_SENSOR_STATE): vol.Any(None, bool, str, int, float),
            vol.Required(ATTR_SENSOR_TYPE): vol.In(SENSOR_TYPES),
            vol.Required(ATTR_SENSOR_UNIQUE_ID): cv.string,
        }
    )

    device_name = config_entry.data[ATTR_DEVICE_NAME]
    resp = {}

    # Hoisted out of the loop: the registry and the webhook id are the same
    # for every entry in the payload, so resolve them once instead of per
    # iteration.
    entity_registry = er.async_get(hass)
    webhook_id = config_entry.data[CONF_WEBHOOK_ID]

    for sensor in data:
        entity_type = sensor[ATTR_SENSOR_TYPE]
        unique_id = sensor[ATTR_SENSOR_UNIQUE_ID]
        unique_store_key = f"{webhook_id}_{unique_id}"

        if not entity_registry.async_get_entity_id(
            entity_type, DOMAIN, unique_store_key
        ):
            _LOGGER.error(
                "Refusing to update %s non-registered sensor: %s",
                device_name,
                unique_store_key,
            )
            err_msg = f"{entity_type} {unique_id} is not registered"
            resp[unique_id] = {
                "success": False,
                "error": {"code": ERR_SENSOR_NOT_REGISTERED, "message": err_msg},
            }
            continue

        try:
            sensor = sensor_schema_full(sensor)
        except vol.Invalid as err:
            err_msg = vol.humanize.humanize_error(sensor, err)
            _LOGGER.error(
                "Received invalid sensor payload from %s for %s: %s",
                device_name,
                unique_id,
                err_msg,
            )
            resp[unique_id] = {
                "success": False,
                "error": {"code": ERR_INVALID_FORMAT, "message": err_msg},
            }
            continue

        sensor[CONF_WEBHOOK_ID] = webhook_id
        async_dispatcher_send(hass, SIGNAL_SENSOR_UPDATE, sensor)

        resp[unique_id] = {"success": True}

    return webhook_response(resp, registration=config_entry.data)
ACTIVE_MODE_NIGHTLIGHT = "1" ACTIVE_COLOR_FLOWING = "1" NIGHTLIGHT_SWITCH_TYPE_LIGHT = "light" SCAN_INTERVAL = timedelta(seconds=30) YEELIGHT_RGB_TRANSITION = "RGBTransition" YEELIGHT_HSV_TRANSACTION = "HSVTransition" YEELIGHT_TEMPERATURE_TRANSACTION = "TemperatureTransition" YEELIGHT_SLEEP_TRANSACTION = "SleepTransition" YEELIGHT_FLOW_TRANSITION_SCHEMA = { vol.Optional(ATTR_COUNT, default=0): cv.positive_int, vol.Optional(ATTR_ACTION, default=ACTION_RECOVER): vol.Any( ACTION_RECOVER, ACTION_OFF, ACTION_STAY ), vol.Required(ATTR_TRANSITIONS): [ { vol.Exclusive(YEELIGHT_RGB_TRANSITION, CONF_TRANSITION): vol.All( cv.ensure_list, [cv.positive_int] ), vol.Exclusive(YEELIGHT_HSV_TRANSACTION, CONF_TRANSITION): vol.All( cv.ensure_list, [cv.positive_int] ), vol.Exclusive(YEELIGHT_TEMPERATURE_TRANSACTION, CONF_TRANSITION): vol.All( cv.ensure_list, [cv.positive_int] ), vol.Exclusive(YEELIGHT_SLEEP_TRANSACTION, CONF_TRANSITION): vol.All( cv.ensure_list, [cv.positive_int] ),
vol.Optional(ATTR_URL): vol.Url(), vol.Required(ATTR_STARTUP): vol.All(_simple_startup, vol.In(STARTUP_ALL)), vol.Required(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]), vol.Optional(ATTR_PORTS): DOCKER_PORTS, vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION, vol.Optional(ATTR_WEBUI): vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"), vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(), vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(NETWORK_PORT, vol.Equal(0)), vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str), vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str), vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str), vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(), vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)), vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(), vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(), vol.Optional(ATTR_HOST_IPC, default=False):
class DailyRule(Rule):
    # Recurrence rule that fires every n-th day, with optional excluded
    # weekdays/months, an hour-of-day, and either an end date or a repeat
    # count (mutually exclusive; see validate_spec).

    # Voluptuous schema every spec dict must satisfy.
    SPEC_SCHEMA = voluptuous.Schema({
        'start': turoboro.common.is_iso_datetime,
        'end': voluptuous.Any(
            None, turoboro.common.is_iso_datetime
        ),
        'repeat': voluptuous.Any(
            None, voluptuous.All(int, voluptuous.Range(min=1))
        ),
        'rule': turoboro.RULE_DAILY,
        'every_nth_day': voluptuous.Range(min=1, max=365),
        'except_days': voluptuous.Or(
            None,
            voluptuous.All(
                turoboro.common.is_list_of_days,
                voluptuous.Length(min=1, max=6)
            )
        ),
        'except_months': voluptuous.Or(
            None,
            voluptuous.All(
                turoboro.common.is_list_of_months,
                voluptuous.Length(min=1, max=11)
            )
        ),
        'on_hour': voluptuous.Range(min=0, max=23),
        'timezone': voluptuous.In(pytz.all_timezones)
    })

    def __init__(self, start, end_on=None, repeat_n_times=None, every_nth_day=1,
                 except_weekdays=None, except_months=None, on_hour=0, timezone='UTC'):
        """Build a daily rule starting at *start* (a naive datetime).

        Both start and end_on are truncated to midnight and localized to
        *timezone* before being stored in ISO format in ``self.spec``.
        """
        if not isinstance(start, datetime):
            raise ValueError('You must specify a datetime')
        tz = pytz.timezone(timezone)
        # Normalize to midnight local time before localizing.
        start = start.replace(hour=0, minute=0, second=0, microsecond=0)
        start = tz.localize(start)
        if isinstance(end_on, datetime):
            end_on = end_on.replace(hour=0, minute=0, second=0, microsecond=0)
            end_on = tz.localize(end_on)
            if end_on < start:
                raise ValueError('End cannot be before start')
        # Defaults first; the setter methods below overwrite them through
        # validation (set_if_valid).
        self.spec = {
            "start": start.isoformat(),
            "end": None,
            "repeat": None,
            "rule": "daily",
            "every_nth_day": 1,
            "except_days": None,
            "except_months": None,
            "on_hour": 0,
            "timezone": timezone
        }
        if repeat_n_times:
            self.repeat_n_times(repeat_n_times)
        self.every_nth_day(every_nth_day)
        # except_weekdays/except_months may be None; star-expanding None
        # raises TypeError, which is deliberately treated as "no exclusions".
        try:
            self.except_weekdays(*except_weekdays)
        except TypeError:
            pass
        try:
            self.except_months(*except_months)
        except TypeError:
            pass
        self.on_hour(on_hour)
        if end_on:
            self.end_on(end_on)

    @classmethod
    def factory(cls, spec):
        """Alternate constructor: build a rule from a ready-made spec dict.

        The spec is only adopted if it validates; otherwise the freshly
        constructed default rule is returned unchanged.
        """
        daily_rule = cls(datetime.utcnow())
        if daily_rule.validate_spec(spec):
            daily_rule.spec = spec
        return daily_rule

    def validate_spec(self, spec):
        """
        Validates the rule specification

        Performs cross-field checks (end vs start, exclusions vs start day,
        end/repeat exclusivity) before running the voluptuous schema.

        :param spec: The spec we are attempting to accept
        :type spec: dict
        :return: dict
        """
        starting_day = turoboro.common.datetime_from_isoformat(spec['start'])
        if spec['end'] is not None and spec['end'] <= spec['start']:
            raise ValueError("End date (%s) must be None or after start date (%s)" % (spec['end'], spec['start']))
        if spec['except_days'] is not None and starting_day.weekday() in spec['except_days']:
            raise ValueError('You may not forbid days that include the start day.')
        if spec['except_months'] is not None and starting_day.month in spec['except_months']:
            raise ValueError('You may not forbid months that include the start day')
        if spec['end'] is not None and spec['repeat'] is not None:
            raise ValueError('You may not specify both an end date and a repeat count')
        return self.SPEC_SCHEMA(spec)

    def every_nth_day(self, n):
        """
        Where `n` is the number of days between two occurrences

        :param n: The number of days between two occurrences
        :type n: int
        :return: turoboro.rules.DailyRule
        """
        self.set_if_valid('every_nth_day', n)
        return self

    def except_weekdays(self, *days):
        """
        Where `days` is a tuple of weekdays from 0 to 6. Where 0 is Monday.
        The recurring rule will skip specified weekdays and move on to the
        next valid occurrence.

        :param days: A tuple of integers, designating weekdays from 0-6 (where 0 is Monday)
        :type days: tuple | None
        :return: turoboro.rules.DailyRule
        """
        # A single None argument means "clear the exclusion list".
        if len(days) == 1 and days[0] is None:
            days = None
        self.set_if_valid('except_days', days)
        return self

    def _is_allowed(self, dt):
        # True unless dt falls on an excluded weekday or month.
        if self.spec['except_days'] is not None and dt.weekday() in self.spec['except_days']:
            return False
        if self.spec['except_months'] is not None and dt.month in self.spec['except_months']:
            return False
        return True

    def _stagger_forward(self, from_dt):
        # Snap from_dt onto the next point of the every-nth-day grid that
        # starts at spec['start'], at the configured hour.
        # NOTE(review): when from_dt is already exactly on the grid
        # (rest == 0) this still advances a full period — confirm intended.
        from_dt = from_dt.replace(hour=self.spec['on_hour'], minute=0, second=0, microsecond=0)
        period = from_dt - self.timezone.localize(turoboro.common.datetime_from_isoformat(self.spec['start']))
        rest = period.days % self.spec['every_nth_day']
        return from_dt + timedelta(days=self.spec['every_nth_day'] - rest)

    def _compute_with_end_date(self, from_dt, working_date, return_as):
        # Enumerate allowed occurrences up to (but excluding) spec['end'].
        result = []
        if from_dt is not None and from_dt != working_date:
            working_date = self._stagger_forward(from_dt)
        end_date = self.timezone.localize(turoboro.common.datetime_from_isoformat(self.spec['end']))
        while working_date < end_date:
            if self._is_allowed(working_date):
                result.append(working_date)
            working_date = working_date + timedelta(days=self.spec['every_nth_day'])
        return Result(result, self, return_as=return_as)

    def _compute_n_times(self, from_dt, working_date, return_as):
        # Enumerate allowed occurrences until spec['repeat'] of them have
        # been collected; from_dt (if given) marks where the returned
        # segment starts.
        result = []
        count = 0
        if from_dt is not None:
            from_dt = from_dt.replace(hour=self.spec['on_hour'], minute=0, second=0, microsecond=0)
        while count < self.spec['repeat']:
            if self._is_allowed(working_date):
                result.append(working_date)
                count += 1
            working_date = working_date + timedelta(days=self.spec['every_nth_day'])
        return Result(result, self, return_as=return_as, segment_from=from_dt)

    def _bounce(self, working_date):
        # Advance one period without any exclusion checks.
        return working_date + timedelta(days=self.spec['every_nth_day'])
) return connection.send_message( websocket_api.result_message(msg["id"], _entry_ext_dict(entry)) ) @require_admin @async_response @websocket_api.websocket_command( { vol.Required("type"): "config/entity_registry/update", vol.Required("entity_id"): cv.entity_id, # If passed in, we update value. Passing None will remove old value. vol.Optional("name"): vol.Any(str, None), vol.Optional("icon"): vol.Any(str, None), vol.Optional("area_id"): vol.Any(str, None), vol.Optional("new_entity_id"): str, # We only allow setting disabled_by user via API. vol.Optional("disabled_by"): vol.Any("user", None), } ) async def websocket_update_entity(hass, connection, msg): """Handle update entity websocket command. Async friendly. """ registry = await async_get_registry(hass) if msg["entity_id"] not in registry.entities:
# Keys naming the OwnTracks payload validations.
VALIDATE_TRANSITION = 'transition'
VALIDATE_WAYPOINTS = 'waypoints'

# Keys used in OwnTracks waypoint payloads.
WAYPOINT_LAT_KEY = 'lat'
WAYPOINT_LON_KEY = 'lon'
# MQTT topic template, filled with user and device identifiers.
WAYPOINT_TOPIC = 'owntracks/{}/{}/waypoint'

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_MAX_GPS_ACCURACY): vol.Coerce(float),
    vol.Optional(CONF_WAYPOINT_IMPORT, default=True): cv.boolean,
    vol.Optional(CONF_WAYPOINT_WHITELIST): vol.All(
        cv.ensure_list, [cv.string]),
    # Secret is either one shared key or a per-topic mapping of keys.
    vol.Optional(CONF_SECRET): vol.Any(
        vol.Schema({vol.Optional(cv.string): cv.string}),
        cv.string)
})


def get_cipher():
    """Return decryption function and length of key.

    libnacl is imported at call time (function scope), so the dependency
    is only needed when decryption is actually requested.
    """
    from libnacl import crypto_secretbox_KEYBYTES as KEYLEN
    from libnacl.secret import SecretBox

    def decrypt(ciphertext, key):
        """Decrypt ciphertext using key."""
        return SecretBox(key).decrypt(ciphertext)

    return (KEYLEN, decrypt)
vol.All(lambda v: v or {}, {util.CONF_STR_KEY: vol.All(lambda v: v or {}, dict)}) ) ########## SCHEDULES SCHEDULE_RULE_SCHEMA = vol.Schema( vol.All( lambda v: v or {}, schedule_rule_pre_hook, { CONF_RULES: lambda v: SCHEDULE_SCHEMA( # type: ignore # pylint: disable=unnecessary-lambda v ), CONF_EXPR: str, CONF_VALUE: object, vol.Optional(CONF_NAME, default=None): vol.Any(str, None), vol.Optional(CONF_START, default=(None, None)): vol.Any( RULE_TIME_VALIDATOR, (None,) ), vol.Optional(CONF_END, default=(None, None)): vol.Any( vol.All( RULE_TIME_VALIDATOR, ( None, datetime.time, vol.Range(min=0, msg="end time can't be shifted backwards"), ), ), (None,), ), vol.Optional(CONF_YEARS): build_range_spec_validator(1970, 2099),
list_indications = list(map(cv.positive_float, indications)) if len(list_indications) < 1: raise vol.Invalid('empty set of indications provided') return list_indications CALCULATE_PUSH_INDICATIONS_SCHEMA = { vol.Required(ATTR_INDICATIONS): indications_validator, vol.Optional(ATTR_IGNORE_PERIOD, default=False): cv.boolean, vol.Optional(ATTR_IGNORE_INDICATIONS, default=False): cv.boolean, vol.Optional(ATTR_INCREMENTAL, default=False): cv.boolean, vol.Optional(ATTR_NOTIFICATION, default=False): vol.Any( cv.boolean, persistent_notification.SCHEMA_SERVICE_CREATE, ) } SERVICE_PUSH_INDICATIONS = 'push_indications' SERVICE_PUSH_INDICATIONS_PAYLOAD_SCHEMA = CALCULATE_PUSH_INDICATIONS_SCHEMA SERVICE_CALCULATE_INDICATIONS = 'calculate_indications' SERVICE_CALCULATE_INDICATIONS_SCHEMA = CALCULATE_PUSH_INDICATIONS_SCHEMA EVENT_CALCULATION_RESULT = DOMAIN + "_calculation_result" EVENT_PUSH_RESULT = DOMAIN + "_push_result" TSensor = TypeVar('TSensor', bound='MESEntity') DiscoveryReturnType = Tuple[List['MoscowPGUSensor'], List[asyncio.Task]]
vol.Optional(CONF_START_TIME): cv.time, vol.Optional(CONF_STOP_TIME): cv.time, vol.Optional(CONF_START_CT, default=4000): vol.All(vol.Coerce(int), vol.Range(min=1000, max=40000)), vol.Optional(CONF_SUNSET_CT, default=3000): vol.All(vol.Coerce(int), vol.Range(min=1000, max=40000)), vol.Optional(CONF_STOP_CT, default=1900): vol.All(vol.Coerce(int), vol.Range(min=1000, max=40000)), vol.Optional(CONF_BRIGHTNESS): vol.All(vol.Coerce(int), vol.Range(min=0, max=255)), vol.Optional(CONF_DISABLE_BRIGHTNESS_ADJUST): cv.boolean, vol.Optional(CONF_MODE, default=DEFAULT_MODE): vol.Any(MODE_XY, MODE_MIRED, MODE_RGB), vol.Optional(CONF_INTERVAL, default=30): cv.positive_int, vol.Optional(ATTR_TRANSITION, default=30): VALID_TRANSITION }) def set_lights_xy(hass, lights, x_val, y_val, brightness, transition): """Set color of array of lights.""" for light in lights: if is_on(hass, light): turn_on(hass, light, xy_color=[x_val, y_val], brightness=brightness,
HeartbeatFilter = sensor_ns.class_('HeartbeatFilter', Filter, Component) DeltaFilter = sensor_ns.class_('DeltaFilter', Filter) OrFilter = sensor_ns.class_('OrFilter', Filter) UniqueFilter = sensor_ns.class_('UniqueFilter', Filter) SENSOR_SCHEMA = cv.MQTT_COMPONENT_SCHEMA.extend({ cv.GenerateID(CONF_MQTT_ID): cv.declare_variable_id(MQTTSensorComponent), vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string_strict, vol.Optional(CONF_ICON): cv.icon, vol.Optional(CONF_ACCURACY_DECIMALS): vol.Coerce(int), vol.Optional(CONF_EXPIRE_AFTER): vol.Any(None, cv.positive_time_period_milliseconds), vol.Optional(CONF_FILTERS): FILTERS_SCHEMA, vol.Optional(CONF_ON_VALUE): automation.validate_automation({ cv.GenerateID(CONF_TRIGGER_ID): cv.declare_variable_id(SensorStateTrigger), }), vol.Optional(CONF_ON_RAW_VALUE): automation.validate_automation({ cv.GenerateID(CONF_TRIGGER_ID): cv.declare_variable_id(SensorRawStateTrigger), }), vol.Optional(CONF_ON_VALUE_RANGE): automation.validate_automation( {
CONF_IS_POWER, CONF_IS_POWER_FACTOR, CONF_IS_PM1, CONF_IS_PM10, CONF_IS_PM25, CONF_IS_PRESSURE, CONF_IS_REACTIVE_POWER, CONF_IS_SIGNAL_STRENGTH, CONF_IS_SULPHUR_DIOXIDE, CONF_IS_TEMPERATURE, CONF_IS_VOLATILE_ORGANIC_COMPOUNDS, CONF_IS_VOLTAGE, CONF_IS_VALUE, ] ), vol.Optional(CONF_BELOW): vol.Any(vol.Coerce(float)), vol.Optional(CONF_ABOVE): vol.Any(vol.Coerce(float)), } ), cv.has_at_least_one_key(CONF_BELOW, CONF_ABOVE), ) async def async_get_conditions( hass: HomeAssistant, device_id: str ) -> list[dict[str, str]]: """List device conditions.""" conditions: list[dict[str, str]] = [] entity_registry = await async_get_registry(hass) entries = [ entry
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_TCP_PORT, default=DEFAULT_WS_PORT): cv.port, vol.Optional(CONF_PROXY_SSL, default=DEFAULT_SSL): cv.boolean, vol.Optional(CONF_TURN_ON_ACTION): cv.SCRIPT_SCHEMA, vol.Optional(CONF_TURN_OFF_ACTION): vol.Any(cv.SCRIPT_SCHEMA, vol.In(DEPRECATED_TURN_OFF_ACTIONS)), vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int, vol.Inclusive(CONF_USERNAME, "auth"): cv.string, vol.Inclusive(CONF_PASSWORD, "auth"): cv.string, vol.Optional(CONF_ENABLE_WEBSOCKET, default=True): cv.boolean, }) SERVICE_ADD_MEDIA = "add_to_playlist" SERVICE_CALL_METHOD = "call_method" ATTR_MEDIA_TYPE = "media_type" ATTR_MEDIA_NAME = "media_name"
class Validate(YomboLibrary): """ Performs various tasks at startup. """ def __str__(self): """ Returns the name of the library. :return: Name of the library :rtype: string """ return "Yombo validate library" def _init_(self, **kwargs): pass # Adapted from: # https://github.com/alecthomas/voluptuous/issues/115#issuecomment-144464666 def has_at_least_one_key(self, *keys: str) -> Callable: """Validate that at least one key exists.""" def validate(obj: Dict) -> Dict: """Test keys exist in dict.""" if not isinstance(obj, dict): raise Invalid('expected dictionary') for k in obj.keys(): if k in keys: return obj raise Invalid('must contain one of {}.'.format(', '.join(keys))) return validate def is_device(self, value): """Validate that value is a real device.""" try: os.stat(value) return str(value) except OSError: raise Invalid('No device at {} found'.format(value)) def is_file(self, value: Any) -> str: """Validate that the value is an existing file.""" if value is None: raise Invalid('None is not file') file_in = os.path.expanduser(str(value)) if not os.path.isfile(file_in): raise Invalid('not a file') if not os.access(file_in, os.R_OK): raise Invalid('file not readable') return file_in def ensure_list(self, value: Union[T, Sequence[T]]) -> Sequence[T]: """Wrap value in list if it is not one.""" if value is None: return [] return value if isinstance(value, list) else [value] time_period_dict = vol.All( dict, vol.Schema({ 'days': vol.Coerce(int), 'hours': vol.Coerce(int), 'minutes': vol.Coerce(int), 'seconds': vol.Coerce(int), 'milliseconds': vol.Coerce(int), }), has_at_least_one_key('days', 'hours', 'minutes', 'seconds', 'milliseconds'), lambda value: timedelta(**value)) def time_period_str(self, value: str) -> timedelta: """Validate and transform time offset.""" if isinstance(value, int): raise Invalid('Make sure you wrap time values in quotes') elif not isinstance(value, str): raise Invalid(TIME_PERIOD_ERROR.format(value)) negative_offset = False if 
value.startswith('-'): negative_offset = True value = value[1:] elif value.startswith('+'): value = value[1:] try: parsed = [int(x) for x in value.split(':')] except ValueError: raise Invalid(TIME_PERIOD_ERROR.format(value)) if len(parsed) == 2: hour, minute = parsed second = 0 elif len(parsed) == 3: hour, minute, second = parsed else: raise Invalid(TIME_PERIOD_ERROR.format(value)) offset = timedelta(hours=hour, minutes=minute, seconds=second) if negative_offset: offset *= -1 return offset def time_period_seconds(self, value: Union[int, str]) -> timedelta: """Validate and transform seconds to a time offset.""" try: return timedelta(seconds=int(value)) except (ValueError, TypeError): raise Invalid('Expected seconds, got {}'.format(value)) time_period = vol.Any(time_period_str, time_period_seconds, timedelta, time_period_dict) def match_all(self, value): """Validate that matches all values.""" return value def positive_timedelta(self, value: timedelta) -> timedelta: """Validate timedelta is positive.""" if value < timedelta(0): raise Invalid('Time period should be positive') return value def _slugify(self, text: str) -> str: """Slugify a given text.""" text = normalize('NFKD', text) text = text.lower() text = text.replace(" ", "_") text = text.translate(TBL_SLUGIFY) text = RE_SLUGIFY.sub("", text) return text def slug(self, value): """Validate value is a valid slug (aka: machine_label)""" if value is None: raise Invalid('Slug should not be None') value = str(value) slg = self._slugify(value) if value == slg: return value raise Invalid('invalid slug {} (try {})'.format(value, slg)) def slugify(self, value): """Coerce a value to a slug.""" # print("going to try to slugify: %s" % value) if value is None: raise Invalid('Slug should not be None') slg = self._slugify(str(value)) if slg: return slg # print("can't make slug: %s" % slg) raise Invalid('Unable to slugify {}'.format(value)) def temperature_unit(self, value) -> str: """Validate and transform temperature unit.""" 
value = str(value).upper() if value == 'C': return TEMP_CELSIUS elif value == 'F': return TEMP_FAHRENHEIT raise Invalid('invalid temperature unit (expected C or F)') unit_system = vol.All( vol.Lower, vol.Any(MISC_UNIT_SYSTEM_METRIC, MISC_UNIT_SYSTEM_IMPERIAL)) def time(self, value): """Validate time.""" try: return self._Times.time_from_string(value)[0] except Exception: raise Invalid('Invalid time specified: {}'.format(value)) def datetime(self, value): """Validate datetime.""" if isinstance(value, datetime_sys): return value try: return self._Times.time_from_string(value)[0] except Exception: raise Invalid('Invalid datetime specified: {}'.format(value)) def time_zone(self, value): """Validate timezone.""" if self._Times.get_time_zone(value) is not None: return value raise Invalid( 'Invalid time zone passed in. Valid options can be found here: ' 'http://en.wikipedia.org/wiki/List_of_tz_database_time_zones') weekdays = vol.All(ensure_list, [vol.In(WEEKDAYS)]) def socket_timeout(value): """Validate timeout float > 0.0. None coerced to socket._GLOBAL_DEFAULT_TIMEOUT bare object. """ if value is None: return _GLOBAL_DEFAULT_TIMEOUT else: try: float_value = float(value) if float_value > 0.0: return float_value raise Invalid('Invalid socket timeout value.' 
' float > 0.0 required.') except Exception as _: raise Invalid('Invalid socket timeout: {err}'.format(err=_)) def x10_address(value): """Validate an x10 address.""" regex = re.compile(r'([A-Pa-p]{1})(?:[2-9]|1[0-6]?)$') if not regex.match(value): raise Invalid('Invalid X10 Address') return str(value).lower() def ensure_list(self, value: Union[T, Sequence[T]]) -> Sequence[T]: """Wrap value in list if it is not one.""" if value is None: return [] return value if isinstance(value, list) else [value] def ensure_list_csv(self, value: Any) -> Sequence: """Ensure that input is a list or make one from comma-separated string.""" if isinstance(value, str): return [member.strip() for member in value.split(',')] return self.ensure_list(value) def is_json(self, value): """ Determine if data is json or not. :param value: :return: """ try: json_object = json.loads(value) except: return False return True def is_msgpack(self, value): """ Helper function to determine if data is msgpack or not. :param mymsgpack: :return: """ try: json_object = msgpack.loads(value) except: return False return True # Validator helpers def key_dependency(self, key, dependency): """Validate that all dependencies exist for key.""" def validator(value): """Test dependencies.""" if not isinstance(value, dict): raise Invalid('key dependencies require a dict') if key in value and dependency not in value: raise Invalid('dependency violation - key "{}" requires ' 'key "{}" to exist'.format(key, dependency)) return value return validator
# Fallback configuration used when no config file has been written yet.
DEFAULT_CONFIG = {
    "positions": {
        "stand": 1.1,
        "sit": 0.75
    },
    "mac_address": "AA:AA:AA:AA:AA:AA",
}

# Config-file schema: a 17-character MAC address plus a mapping of
# position name -> desk height, bounded by the desk's physical range.
CONFIG_SCHEMA = vol.Schema(
    {
        "mac_address": vol.All(str, vol.Length(min=17, max=17)),
        "positions": {
            str: vol.All(
                vol.Any(float, int),
                vol.Range(min=IdasenDesk.MIN_HEIGHT, max=IdasenDesk.MAX_HEIGHT),
            )
        },
    },
    extra=False,  # reject unknown top-level keys
)

# Names that may not be used as position names.
RESERVED_NAMES = {"init", "monitor", "height", "save", "delete"}


def save_config(config: dict, path: str = IDASEN_CONFIG_PATH):
    """Write *config* to *path* as YAML, overwriting any existing file."""
    with open(path, "w") as f:
        yaml.dump(config, f)
extra=vol.ALLOW_EXTRA, ) ADD_ALL_LINK_SCHEMA = vol.Schema({ vol.Required(SRV_ALL_LINK_GROUP): vol.Range(min=0, max=255), vol.Required(SRV_ALL_LINK_MODE): vol.In([SRV_CONTROLLER, SRV_RESPONDER]), }) DEL_ALL_LINK_SCHEMA = vol.Schema( {vol.Required(SRV_ALL_LINK_GROUP): vol.Range(min=0, max=255)}) LOAD_ALDB_SCHEMA = vol.Schema({ vol.Required(CONF_ENTITY_ID): vol.Any(cv.entity_id, ENTITY_MATCH_ALL), vol.Optional(SRV_LOAD_DB_RELOAD, default=False): cv.boolean, }) PRINT_ALDB_SCHEMA = vol.Schema({vol.Required(CONF_ENTITY_ID): cv.entity_id}) X10_HOUSECODE_SCHEMA = vol.Schema( {vol.Required(SRV_HOUSECODE): vol.In(HOUSECODES)}) TRIGGER_SCENE_SCHEMA = vol.Schema( {vol.Required(SRV_ALL_LINK_GROUP): vol.Range(min=0, max=255)}) ADD_DEFAULT_LINKS_SCHEMA = vol.Schema( {vol.Required(CONF_ENTITY_ID): cv.entity_id})
# Dashboard configuration keys.
CONF_TITLE = "title"
CONF_REQUIRE_ADMIN = "require_admin"
CONF_SHOW_IN_SIDEBAR = "show_in_sidebar"

# Fields accepted when creating any dashboard.
DASHBOARD_BASE_CREATE_FIELDS = {
    vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean,
    vol.Optional(CONF_ICON): cv.icon,
    vol.Required(CONF_TITLE): cv.string,
    vol.Optional(CONF_SHOW_IN_SIDEBAR, default=True): cv.boolean,
}

# Fields accepted when updating a dashboard; passing None for the icon
# clears it.
DASHBOARD_BASE_UPDATE_FIELDS = {
    vol.Optional(CONF_REQUIRE_ADMIN): cv.boolean,
    vol.Optional(CONF_ICON): vol.Any(cv.icon, None),
    vol.Optional(CONF_TITLE): cv.string,
    vol.Optional(CONF_SHOW_IN_SIDEBAR): cv.boolean,
}

# Storage-backed dashboards additionally require a URL path.
STORAGE_DASHBOARD_CREATE_FIELDS = {
    **DASHBOARD_BASE_CREATE_FIELDS,
    vol.Required(CONF_URL_PATH): cv.string,
    # For now we write "storage" as all modes.
    # In future we can adjust this to be other modes.
    vol.Optional(CONF_MODE, default=MODE_STORAGE): MODE_STORAGE,
}

STORAGE_DASHBOARD_UPDATE_FIELDS = DASHBOARD_BASE_UPDATE_FIELDS
SSDP_TARGET = ("239.255.255.250", 1982) SSDP_ST = "wifi_bulb" DISCOVERY_ATTEMPTS = 3 DISCOVERY_SEARCH_INTERVAL = timedelta(seconds=2) DISCOVERY_TIMEOUT = 2 YEELIGHT_RGB_TRANSITION = "RGBTransition" YEELIGHT_HSV_TRANSACTION = "HSVTransition" YEELIGHT_TEMPERATURE_TRANSACTION = "TemperatureTransition" YEELIGHT_SLEEP_TRANSACTION = "SleepTransition" YEELIGHT_FLOW_TRANSITION_SCHEMA = { vol.Optional(ATTR_COUNT, default=0): cv.positive_int, vol.Optional(ATTR_ACTION, default=ACTION_RECOVER): vol.Any(ACTION_RECOVER, ACTION_OFF, ACTION_STAY), vol.Required(ATTR_TRANSITIONS): [{ vol.Exclusive(YEELIGHT_RGB_TRANSITION, CONF_TRANSITION): vol.All(cv.ensure_list, [cv.positive_int]), vol.Exclusive(YEELIGHT_HSV_TRANSACTION, CONF_TRANSITION): vol.All(cv.ensure_list, [cv.positive_int]), vol.Exclusive(YEELIGHT_TEMPERATURE_TRANSACTION, CONF_TRANSITION): vol.All(cv.ensure_list, [cv.positive_int]), vol.Exclusive(YEELIGHT_SLEEP_TRANSACTION, CONF_TRANSITION): vol.All(cv.ensure_list, [cv.positive_int]), }], } DEVICE_SCHEMA = vol.Schema({ vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
ALMOND_SETUP_DELAY = 30 DEFAULT_OAUTH2_HOST = "https://almond.stanford.edu" DEFAULT_LOCAL_HOST = "http://localhost:3000" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Any( vol.Schema( { vol.Required(CONF_TYPE): TYPE_OAUTH2, vol.Required(CONF_CLIENT_ID): cv.string, vol.Required(CONF_CLIENT_SECRET): cv.string, vol.Optional(CONF_HOST, default=DEFAULT_OAUTH2_HOST): cv.url, }), vol.Schema({ vol.Required(CONF_TYPE): TYPE_LOCAL, vol.Required(CONF_HOST): cv.url }), ) }, extra=vol.ALLOW_EXTRA, ) _LOGGER = logging.getLogger(__name__) async def async_setup(hass, config): """Set up the Almond component.""" hass.data[DOMAIN] = {}
vol.Optional(ATTR_PASSWORD): cv.string, vol.Optional(ATTR_AUTHENTICATION): cv.string, }) SERVICE_SCHEMA_SEND_LOCATION = BASE_SERVICE_SCHEMA.extend({ vol.Required(ATTR_LONGITUDE): cv.template, vol.Required(ATTR_LATITUDE): cv.template, }) SERVICE_SCHEMA_EDIT_MESSAGE = SERVICE_SCHEMA_SEND_MESSAGE.extend({ vol.Required(ATTR_MESSAGEID): vol.Any(cv.positive_int, vol.All(cv.string, 'last')), vol.Required(ATTR_CHAT_ID): vol.Coerce(int), }) SERVICE_SCHEMA_EDIT_CAPTION = vol.Schema( { vol.Required(ATTR_MESSAGEID): vol.Any(cv.positive_int, vol.All(cv.string, 'last')), vol.Required(ATTR_CHAT_ID): vol.Coerce(int), vol.Required(ATTR_CAPTION): cv.template, vol.Optional(ATTR_KEYBOARD_INLINE): cv.ensure_list, },
vol.Required(ATTR_LONGITUDE): cv.longitude, }, extra=vol.REMOVE_EXTRA), }, extra=vol.REMOVE_EXTRA) NETWORKS_RESPONSE_SCHEMA = vol.Schema({ vol.Required(ATTR_NETWORKS_LIST): [NETWORK_SCHEMA], }) STATION_SCHEMA = vol.Schema( { vol.Required(ATTR_FREE_BIKES): cv.positive_int, vol.Required(ATTR_EMPTY_SLOTS): vol.Any(cv.positive_int, None), vol.Required(ATTR_LATITUDE): cv.latitude, vol.Required(ATTR_LONGITUDE): cv.longitude, vol.Required(ATTR_ID): cv.string, vol.Required(ATTR_NAME): cv.string, vol.Required(ATTR_TIMESTAMP): cv.string, vol.Optional(ATTR_EXTRA): vol.Schema({vol.Optional(ATTR_UID): cv.string}, extra=vol.REMOVE_EXTRA) }, extra=vol.REMOVE_EXTRA)
""" config_keys = set() # type: Set[str] for config in configs: key = config.get(CONF_ID, config[CONF_TYPE]) if key in config_keys: raise vol.Invalid( 'Duplicate mfa module {} found. Please add unique IDs if ' 'you want to have the same mfa module twice'.format( config[CONF_TYPE] )) config_keys.add(key) return configs PACKAGES_CONFIG_SCHEMA = cv.schema_with_slug_keys( # Package names are slugs vol.Schema({cv.string: vol.Any(dict, list, None)}) # Component config ) CUSTOMIZE_DICT_SCHEMA = vol.Schema({ vol.Optional(ATTR_FRIENDLY_NAME): cv.string, vol.Optional(ATTR_HIDDEN): cv.boolean, vol.Optional(ATTR_ASSUMED_STATE): cv.boolean, }, extra=vol.ALLOW_EXTRA) CUSTOMIZE_CONFIG_SCHEMA = vol.Schema({ vol.Optional(CONF_CUSTOMIZE, default={}): vol.Schema({cv.entity_id: CUSTOMIZE_DICT_SCHEMA}), vol.Optional(CONF_CUSTOMIZE_DOMAIN, default={}): vol.Schema({cv.string: CUSTOMIZE_DICT_SCHEMA}), vol.Optional(CONF_CUSTOMIZE_GLOB, default={}): vol.Schema({cv.string: CUSTOMIZE_DICT_SCHEMA}),
_LOGGER = logging.getLogger(__name__) current_domain: ContextVar[str] = ContextVar("current_domain") def _conf_preprocess(value): """Preprocess alternative configuration formats.""" if not isinstance(value, dict): value = {CONF_ENTITIES: value} return value GROUP_SCHEMA = vol.All( vol.Schema({ vol.Optional(CONF_ENTITIES): vol.Any(cv.entity_ids, None), CONF_NAME: cv.string, CONF_ICON: cv.icon, CONF_ALL: cv.boolean, })) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( {cv.match_all: vol.All(_conf_preprocess, GROUP_SCHEMA)}) }, extra=vol.ALLOW_EXTRA, ) class GroupIntegrationRegistry:
CREATE_VACATION_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Required(ATTR_VACATION_NAME): vol.All(cv.string, vol.Length(max=12)), vol.Required(ATTR_COOL_TEMP): vol.Coerce(float), vol.Required(ATTR_HEAT_TEMP): vol.Coerce(float), vol.Inclusive( ATTR_START_DATE, "dtgroup", msg=DTGROUP_INCLUSIVE_MSG ): ecobee_date, vol.Inclusive( ATTR_START_TIME, "dtgroup", msg=DTGROUP_INCLUSIVE_MSG ): ecobee_time, vol.Inclusive(ATTR_END_DATE, "dtgroup", msg=DTGROUP_INCLUSIVE_MSG): ecobee_date, vol.Inclusive(ATTR_END_TIME, "dtgroup", msg=DTGROUP_INCLUSIVE_MSG): ecobee_time, vol.Optional(ATTR_FAN_MODE, default="auto"): vol.Any("auto", "on"), vol.Optional(ATTR_FAN_MIN_ON_TIME, default=0): vol.All( int, vol.Range(min=0, max=60) ), } ) DELETE_VACATION_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Required(ATTR_VACATION_NAME): vol.All(cv.string, vol.Length(max=12)), } ) RESUME_PROGRAM_SCHEMA = vol.Schema( {
def schema(self):
    """Return the voluptuous schema for this object.

    The message key is required and must be either a string or None.
    """
    message_validator = voluptuous.Any(
        voluptuous.Any(*six.string_types), None
    )
    spec = {voluptuous.Required(self.MESSAGE): message_validator}
    return voluptuous.Schema(spec)
) _LOGGER = logging.getLogger(__name__) DELAY_SAVE = 10 WEBHOOK_COMMANDS = Registry() COMBINED_CLASSES = set(BINARY_SENSOR_CLASSES + SENSOR_CLASSES) SENSOR_TYPES = [ATTR_SENSOR_TYPE_BINARY_SENSOR, ATTR_SENSOR_TYPE_SENSOR] WEBHOOK_PAYLOAD_SCHEMA = vol.Schema({ vol.Required(ATTR_WEBHOOK_TYPE): cv.string, vol.Required(ATTR_WEBHOOK_DATA, default={}): vol.Any(dict, list), vol.Optional(ATTR_WEBHOOK_ENCRYPTED, default=False): cv.boolean, vol.Optional(ATTR_WEBHOOK_ENCRYPTED_DATA): cv.string, }) def validate_schema(schema): """Decorate a webhook function with a schema.""" if isinstance(schema, dict): schema = vol.Schema(schema) def wrapper(func): """Wrap function so we validate schema.""" @wraps(func)
BEACON_DEV_ID = "beacon" DEFAULT_OWNTRACKS_TOPIC = "owntracks/#" CONFIG_SCHEMA = vol.Schema( { vol.Optional(DOMAIN, default={}): { vol.Optional(CONF_MAX_GPS_ACCURACY): vol.Coerce(float), vol.Optional(CONF_WAYPOINT_IMPORT, default=True): cv.boolean, vol.Optional(CONF_EVENTS_ONLY, default=False): cv.boolean, vol.Optional( CONF_MQTT_TOPIC, default=DEFAULT_OWNTRACKS_TOPIC ): mqtt.valid_subscribe_topic, vol.Optional(CONF_WAYPOINT_WHITELIST): vol.All(cv.ensure_list, [cv.string]), vol.Optional(CONF_SECRET): vol.Any( vol.Schema({vol.Optional(cv.string): cv.string}), cv.string ), vol.Optional(CONF_REGION_MAPPING, default={}): dict, vol.Optional(CONF_WEBHOOK_ID): cv.string, } }, extra=vol.ALLOW_EXTRA, ) async def async_setup(hass, config): """Initialize OwnTracks component.""" hass.data[DOMAIN] = {"config": config[DOMAIN], "devices": {}, "unsub": None} if not hass.config_entries.async_entries(DOMAIN): hass.async_create_task( hass.config_entries.flow.async_init(
REQUIREMENTS = [ 'https://github.com/bashwork/pymodbus/archive/' 'd7fc4f1cc975631e0a9011390e8017f64b612661.zip#pymodbus==1.2.0' ] # Type of network CONF_BAUDRATE = "baudrate" CONF_BYTESIZE = "bytesize" CONF_STOPBITS = "stopbits" CONF_TYPE = "type" CONF_PARITY = "parity" SERIAL_SCHEMA = { vol.Required(CONF_BAUDRATE): cv.positive_int, vol.Required(CONF_BYTESIZE): vol.Any(5, 6, 7, 8), vol.Required(CONF_METHOD): vol.Any('rtu', 'ascii'), vol.Required(CONF_PORT): cv.string, vol.Required(CONF_PARITY): vol.Any('E', 'O', 'N'), vol.Required(CONF_STOPBITS): vol.Any(1, 2), vol.Required(CONF_TYPE): 'serial', } ETHERNET_SCHEMA = { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PORT): cv.positive_int, vol.Required(CONF_TYPE): vol.Any('tcp', 'udp'), } CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Any(SERIAL_SCHEMA, ETHERNET_SCHEMA)}, extra=vol.ALLOW_EXTRA)
def getSchema():
    """Build the validator for a GitHub connection entry.

    Accepts either a plain string or an arbitrary dict.
    """
    dict_schema = v.Schema(dict)
    github_connection = v.Any(str, dict_schema)
    return github_connection