def disk_space(**kwargs):
    """Schema fragment for the required ``disk_space`` option.

    Only consulted by the ``space`` filtertype; the supplied value is
    coerced to ``float``.
    """
    # Required: the space filtertype cannot operate without a threshold.
    validator = Any(Coerce(float))
    return {Required('disk_space'): validator}
return schemas[""](data) if parsed.scheme not in schemas: raise Invalid(f"Unsupported URL type {parsed.scheme}://") return schemas[parsed.scheme](data) return validate class RelPath(str): pass REMOTE_COMMON = { "url": str, "checksum_jobs": All(Coerce(int), Range(1)), Optional("no_traverse"): Bool, # obsoleted "verify": Bool, } LOCAL_COMMON = { "type": supported_cache_type, Optional("protected", default=False): Bool, # obsoleted "shared": All(Lower, Choices("group")), Optional("slow_link_warning", default=True): Bool, } HTTP_COMMON = { "auth": All(Lower, Choices("basic", "digest", "custom")), "custom_auth_header": str, "user": str, "password": str, "ask_password": Bool,
from .defaults import EXPIRE_AFTER, FACE_RECOGNITION_PATH def get_default_model() -> str: if os.getenv(ENV_CUDA_SUPPORTED) == "true": return "cnn" return "hog" SCHEMA = BASE_SCHEMA.extend({ Optional("face_recognition_path", default=FACE_RECOGNITION_PATH): str, Optional("expire_after", default=EXPIRE_AFTER): All(Any(All(int, Range(min=0)), All(float, Range(min=0.0))), Coerce(float)), Optional("model", default=get_default_model()): Any("hog", "cnn"), }) LOGGER = logging.getLogger(__name__) class Config(PostProcessorConfig): def __init__(self, post_processors_config, processor_config): super().__init__(post_processors_config, processor_config) self._face_recognition_path = processor_config["face_recognition_path"] self._expire_after = processor_config["expire_after"] self._model = processor_config["model"] @property
def timeout(action):
    """Schema fragment for the optional ``timeout`` option.

    Accepts an int (coerced) or ``None``; defaults to 60.  The ``action``
    argument is currently unused.
    """
    # if action == 'reindex':
    default_value = 60
    return {Optional('timeout', default=default_value): Any(Coerce(int), None)}
# NOTE: LocalOutput is the default choice ] OUTS_MAP = { Schemes.HDFS: HDFSOutput, Schemes.S3: S3Output, Schemes.GS: GSOutput, Schemes.SSH: SSHOutput, Schemes.LOCAL: LocalOutput, Schemes.WEBHDFS: WebHDFSOutput, } CHECKSUM_SCHEMA = Any( None, And(str, Length(max=0), SetTo(None)), And(Any(str, And(int, Coerce(str))), Length(min=3), Lower), ) # NOTE: currently there are only 3 possible checksum names: # # 1) md5 (LOCAL, SSH, GS); # 2) etag (S3); # 3) checksum (HDFS); # # so when a few types of outputs share the same name, we only need # specify it once. CHECKSUMS_SCHEMA = { LocalTree.PARAM_CHECKSUM: CHECKSUM_SCHEMA, S3Tree.PARAM_CHECKSUM: CHECKSUM_SCHEMA, HDFSTree.PARAM_CHECKSUM: CHECKSUM_SCHEMA, WebHDFSTree.PARAM_CHECKSUM: CHECKSUM_SCHEMA,
def retry_count():
    """Schema fragment for ``retry_count``: int in [0, 100], default 3."""
    bounded_int = All(Coerce(int), Range(min=0, max=100))
    return {Optional('retry_count', default=3): bounded_int}
def count():
    """Schema fragment for the required ``count`` option: int in [0, 10]."""
    return {
        Required('count'): All(Coerce(int), Range(min=0, max=10)),
    }
from voluptuous import All, Any, Maybe, Coerce, Length, Range, Match, Required, Optional, Schema SERVER_CONF_VALIDATOR = Schema({ Required('status'): Coerce(str), Required('message'): Coerce(str), Optional('gateway_uid'): All(Coerce(str), Match(r'^0x[0-9A-F]{8}$')), Optional('mqtt_topic', default='ota/global'): Coerce(str), Optional('mqtt_broker', default='broker.hivemq.com'): Coerce(str), Optional('end_device_multicast_addr', default='230.6.6.1:7777'): All( Coerce(str), Match( r'^(22[4-9]|230)(\.([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])){3}:\d{3,4}$' )), Optional('max_log_size', default='2'): All(Coerce(str), Match(r'^\d{1,2}$')), Optional('max_log_count', default='5'): All(Coerce(str), Match(r'^\d{1,2}$')), }) END_DEVICE_CONF_VALIDATOR = Schema({ Required('code'): Coerce(str), Required('id'): All(Coerce(str, msg='Invalid variable type, expected str'), Length(min=8, max=30, msg='Invalid Length, expected 8-30 char'),
data=cards, )) @bp.route('/repos') @login_required @with_pagination def get_own_repos(): repos = Repository.query.filter_by(user_id=g.user.id) \ .limit(g.limit).offset(g.offset).all() return jsonify(repos) common_repo_schema = { Required('desc'): Any(str, None), Required('private'): Coerce(bool), Required('sides'): Range(min=2, max=6), Required('side_a_name'): Any(str, None), Required('side_b_name'): Any(str, None), Required('side_c_name'): Any(str, None), Required('side_d_name'): Any(str, None), Required('side_e_name'): Any(str, None), Required('side_f_name'): Any(str, None), } @bp.route('/repos', methods=['POST']) @login_required def create_repo(): schema = { Required('name'): All(
return schemas[""](data) if parsed.scheme not in schemas: raise Invalid(f"Unsupported URL type {parsed.scheme}://") return schemas[parsed.scheme](data) return validate class RelPath(str): pass REMOTE_COMMON = { "url": str, "checksum_jobs": All(Coerce(int), Range(1)), "jobs": All(Coerce(int), Range(1)), Optional("no_traverse"): Bool, # obsoleted "verify": Bool, } LOCAL_COMMON = { "type": supported_cache_type, Optional("protected", default=False): Bool, # obsoleted "shared": All(Lower, Choices("group")), Optional("slow_link_warning", default=True): Bool, } HTTP_COMMON = { "auth": All(Lower, Choices("basic", "digest", "custom")), "custom_auth_header": str, "user": str, "password": str,
return self.db.execute('update goods_property set value=%s where goods_id=%s and name="is_wx_on_sale"', operator, goods_id) self.write({'is_ok': True}) list_schema = Schema({ 'supplier': str, 'goods': str, 'status': str, }, extra=True) add_schema = Schema({ 'type': Any('E', 'R'), 'generate_type': Any('GENERATE', 'IMPORT'), 'category_id': Coerce(int), 'on_sale_at': Datetime(), 'off_sale_at': Datetime(), 'expire_at': Any(Datetime(), EmptyNone()), 'properties': All(EmptyList(), Unique()), 'sms_name': All(Decode(), Length(min=1, max=50)), 'short_name': All(Decode(), Length(min=1, max=50)), 'face_value': All(Coerce(Decimal), Range(min=Decimal('0.0'))), 'sales_price': All(Coerce(Decimal), Range(min=Decimal('0.0'))), 'purchase_price': All(Coerce(Decimal), Range(min=Decimal('0.0'))), 'postage': Any(All(Coerce(Decimal), Range(min=Decimal('0.0'))), EmptyNone()), 'stock': All(Coerce(int), Range(min=1)), 'max_buy': All(Coerce(int), Range(min=0)), 'all_shop': Coerce(int), 'shops': All(EmptyList(), Unique(), ListCoerce(int)), 'img_path': str,
str, Optional("audio_codec", default="unset"): Maybe(str), Optional("rtsp_transport", default="tcp"): Any("tcp", "udp", "udp_multicast", "http"), Optional("filter_args", default=[]): list, Optional("pix_fmt", default="nv12"): Any("nv12", "yuv420p"), Optional("frame_timeout", default=60): int, }) MJPEG_STREAM_SCHEMA = Schema({ Optional("width", default=0): All(Any(int, str), Coerce(int)), Optional("height", default=0): All(Any(int, str), Coerce(int)), Optional("draw_objects", default=False): Any(str, bool, bytes), Optional("draw_motion", default=False): Any(str, bool, bytes), Optional("draw_motion_mask", default=False): Any(str, bool, bytes), Optional("draw_object_mask", default=False): Any(str, bool, bytes), Optional("draw_zones", default=False): Any(str, bool, bytes), Optional("rotate", default=0): All(Any(int, str), Coerce(int)), Optional("mirror", default=False):
Optional('device-tree-origin', default='gadget'): str, Optional('device-tree'): str, Optional('format'): YAMLFormat, Required('volumes'): { Match('^[-a-zA-Z0-9]+$'): Schema({ Optional('schema', default='gpt' if has_new_voluptuous() else VolumeSchema.gpt): Enumify(VolumeSchema), Optional('bootloader'): Enumify(BootLoader, preprocessor=methodcaller('replace', '-', '')), Optional('id'): Coerce(Id), Required('structure'): [ Schema({ Optional('name'): str, Optional('offset'): Coerce(as_size), Optional('offset-write'): Any(Coerce(Size32bit), RelativeOffset), Required('size'): Coerce(as_size), Required('type'): Any('mbr', 'bare', Coerce(HybridId)), Optional('role'): Enumify(StructureRole, preprocessor=methodcaller('replace', '-', '_')),
def epoch(**kwargs):
    """Schema fragment for the optional ``epoch`` option: int or None.

    Only consulted by the ``age`` filtertype.
    """
    return {Optional('epoch', default=None): Any(Coerce(int), None)}
def number_of_shards():
    """Schema fragment for ``number_of_shards``: int in [1, 99], default 1."""
    shard_count = All(Coerce(int), Range(min=1, max=99))
    return {Optional('number_of_shards', default=1): shard_count}
def test_schema(self):
    """test/merge a schema to loaded configuration"""
    # Schema describing what the 'some_service' configuration
    # ought to look like.
    schema = Schema({
        'some_service': {
            'host': basestring,
            'port': Coerce(int),
            Required('pool_size', default=5): All(Coerce(int), Range(min=1, max=20)),
            'credentials': {
                'username': basestring,
                'password': basestring,
            },
        }
    })

    def make_config(service):
        # Build a fresh ConfigDict validated by the schema above.
        cd = ConfigDict()
        cd.register_trigger(SchemaTrigger(schema))
        cd.merge_dict({'some_service': service})
        return cd

    # Valid config: the pool_size default (5) gets filled in.
    cd = make_config({
        'host': 'xyz',
        'port': 123,
        'credentials': {'username': '******', 'password': '******'},
    })
    cd.configure()
    self.assertEquals(cd.some_service.host, 'xyz')
    self.assertEquals(cd.some_service.port, 123)
    self.assertEquals(cd.some_service.pool_size, 5)
    self.assertEquals(cd.some_service.credentials.username, 'foo')
    self.assertEquals(cd.some_service.credentials.password, 'bar')

    # Integer coersion should take care of '123' instead of 123.
    cd = make_config({
        'host': 'xyz',
        'port': '123',
        'credentials': {'username': '******', 'password': '******'},
    })
    cd.configure()
    self.assertEquals(cd.some_service.host, 'xyz')
    self.assertEquals(cd.some_service.port, 123)
    self.assertEquals(cd.some_service.pool_size, 5)
    self.assertEquals(cd.some_service.credentials.username, 'foo')
    self.assertEquals(cd.some_service.credentials.password, 'bar')

    # Not valid -- pool_size out of range.
    cd = make_config({
        'host': 'xyz',
        'port': 123,
        'pool_size': 21,
        'credentials': {'username': '******', 'password': '******'},
    })
    with self.assertRaises(MultipleInvalid):
        cd.configure()
def requests_per_second():
    """Schema fragment for ``requests_per_second``.

    Accepts -1 (unthrottled), any int (coerced), or ``None``; defaults to -1.
    """
    return {Optional('requests_per_second', default=-1): Any(-1, Coerce(int), None)}
name, binary)) logger.debug("Found {} Binary: {}".format(name, os.path.abspath(binary))) return binary cudaDeconSchema = Schema( { Required('input-dir'): dirpath, Required('otf-file'): filepath, Required('filename-pattern'): str, 'drdata': All(Coerce(float), Range(0.01, 0.5), msg='Data pixel size (drdata) must be float between 0.01 - 0.5'), 'dzdata': All(Coerce(float), Range(0, 50), msg='Data Z step size (dzdata) must be float between 0 - 50'), 'drpsf': All(Coerce(float), Range(0.01, 0.5), msg='PSF pixel size (drpsf) must be float between 0.01 - 0.5'), 'dzpsf': All(Coerce(float), Range(0, 50), msg='PSF Z step size (dzpsf) must be float between 0 - 50'), 'wavelength':
def retry_interval():
    """Schema fragment for ``retry_interval``: int in [1, 600], default 120."""
    seconds = All(Coerce(int), Range(min=1, max=600))
    return {Optional('retry_interval', default=120): seconds}
class UploadOrder(APIView):
    """Create or update an order record, optionally attaching an image.

    POST semantics:
      * without ``action``: insert a new row keyed by ``mail_pd_id``
        (rejected with CODE_DATA_EXIST when that id already exists);
      * with ``action``: update the existing row in place.
    """

    _filter_null = True
    _schema = Schema({
        Required("mail_pd_id"): only_num_id,
        Required("receiver"): Coerce(str),
        Required("order_status"): Coerce(int),
        Optional("order_id"): str,
        Optional("apply_time"): str,
        Optional("wangwang_id"): str,
        Optional("goods_id"): str,
        Optional("return_pd_id"): str,
        Optional("return_pd_company"): str,
        Optional("comment"): str,
        Optional("upload_order_img"): Any(FileStorage, str),
        Optional("action"): str,
    })

    def post(self, request):
        # "空" is the client-side marker for "no value" -- normalize to None.
        args_map = {key: value if value != "空" else None
                    for key, value in request.req_args.items()}
        edit_action = args_map.get("action")

        select_sql = """SELECT `id` FROM `tb_order_info` WHERE `mail_pd_id`=?"""
        _id = self.db.get_value(select_sql, (args_map["mail_pd_id"], ))
        # A plain create must not silently overwrite an existing order.
        if not edit_action and _id:
            raise APIException(code_msg.CODE_DATA_EXIST)

        insert_sql = """INSERT INTO `tb_order_info`(`mail_pd_id`, `receiver`, `order_status`, `order_id`, `apply_time`, `wangwang_id`, `goods_id`, `return_pd_company`, `return_pd_id`) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"""
        if edit_action:
            img_id = _id
        else:
            result = self.db.execute(insert_sql, (args_map["mail_pd_id"], args_map["receiver"],
                                                  args_map["order_status"], args_map["order_id"],
                                                  args_map["apply_time"], args_map["wangwang_id"],
                                                  args_map["goods_id"], args_map["return_pd_company"],
                                                  args_map["return_pd_id"]))
            img_id = result.lastrowid

        file_obj = args_map.get("upload_order_img")
        if file_obj:
            try:
                filename = self._upload_images(file_obj, img_id)
                self.db.execute("""UPDATE `tb_order_info` SET `img_name`=? WHERE `id`=?""",
                                (filename, img_id))
            except Exception:
                # Roll back the row and any partially saved file so a failed
                # upload leaves no orphaned state behind.  (Was a bare
                # ``except:``; also guard upload_img_path, which is unset when
                # _upload_images fails before saving.)
                self.db.execute("""DELETE FROM tb_order_info WHERE `id`=?""", (img_id, ))
                img_path = getattr(self, "upload_img_path", None)
                if img_path and os.path.exists(img_path):
                    os.remove(img_path)
                raise APIException(code_msg.CODE_UPLOAD_ORDER_ERROR)

        if edit_action:
            update_sql = """UPDATE `tb_order_info` SET `receiver`=?, `order_status`=?, `order_id`=?, `apply_time`=?, """ \
                         """`wangwang_id`=?, `goods_id`=?, `return_pd_company`=?, `return_pd_id`=?, `comment`=?, """ \
                         """`update_time`=? WHERE `mail_pd_id`=? AND `is_delete`=0"""
            self.db.execute(update_sql, (args_map["receiver"], args_map["order_status"],
                                         args_map["order_id"], args_map["apply_time"],
                                         args_map["wangwang_id"], args_map["goods_id"],
                                         args_map["return_pd_company"], args_map["return_pd_id"],
                                         args_map["comment"], get_dt(), args_map["mail_pd_id"], ))

    def _upload_images(self, file_obj, img_id):
        """Validate the upload's extension, save it as <img_id>.<ext>, and
        return the original filename.

        Raises APIException(CODE_UPLOAD_ORDER_ERROR) for dot-less filenames or
        disallowed extensions.
        """
        filename = file_obj.filename
        # BUG FIX: the original combined the two checks with ``and`` (so a bad
        # extension alone passed) and called rsplit before checking for a dot
        # (IndexError on dot-less names).  Check the dot first, then the
        # extension, each rejecting on its own.
        if '.' not in filename:
            raise APIException(code_msg.CODE_UPLOAD_ORDER_ERROR)
        file_extensions = filename.rsplit('.', 1)[1]
        if file_extensions not in current_app.config["ALLOWED_IMG_EXTENSIONS"]:
            raise APIException(code_msg.CODE_UPLOAD_ORDER_ERROR)
        self.upload_img_path = os.path.join(make_dir(current_app.config["UPLOAD_IMG_PATH"]),
                                            str(img_id) + "." + file_extensions)
        file_obj.save(self.upload_img_path)
        return filename
def slices():
    """Schema fragment for ``slices``: int in [1, 500] or None, default 1."""
    bounded = All(Coerce(int), Range(min=1, max=500))
    return {Optional('slices', default=1): Any(bounded, None)}
return "darknet" if os.getenv(ENV_RASPBERRYPI3) == "true": return "edgetpu" if os.getenv(ENV_RASPBERRYPI4) == "true": return "edgetpu" return "darknet" LABELS_SCHEMA = Schema([ All( deprecated("triggers_recording", replacement="trigger_recorder"), { Required("label"): str, Optional("confidence", default=0.8): All(Any(0, 1, All(float, Range(min=0.0, max=1.0))), Coerce(float)), Optional("height_min", default=0.0): All(Any(0, 1, All(float, Range(min=0.0, max=1.0))), Coerce(float)), Optional("height_max", default=1.0): All(Any(0, 1, All(float, Range(min=0.0, max=1.0))), Coerce(float)), Optional("width_min", default=0.0): All(Any(0, 1, All(float, Range(min=0.0, max=1.0))), Coerce(float)), Optional("width_max", default=1.0): All(Any(0, 1, All(float, Range(min=0.0, max=1.0))), Coerce(float)), Optional("trigger_recorder", default=True): bool, Optional("require_motion", default=False): bool, Optional("post_processor", default=None): Any(str, None), },
def delay():
    """Schema fragment for ``delay``: float in [0.0, 3600.0], default 0."""
    seconds = All(Coerce(float), Range(min=0.0, max=3600.0))
    return {Optional('delay', default=0): seconds}
class MockProvisioner1(object):
    """Minimal stand-in provisioner used by tests."""

    # Registry name under which this provisioner is looked up.
    name = 'mp1'
    # Option schema: 'a' must be a str, 'b' is coerced to int.
    schema = {'a': str, 'b': Coerce(int)}
def lb_dev_state(x):
    """Validate a bulb ``dev_state`` value; only "normal" is accepted.

    Raises Invalid for anything else; returns the value unchanged otherwise.
    """
    if x in ["normal"]:
        return x

    raise Invalid(f"Invalid dev_state {x}")


TZ_SCHEMA = Schema(
    {"zone_str": str, "dst_offset": int, "index": All(int, Range(min=0)), "tz_str": str}
)

CURRENT_CONSUMPTION_SCHEMA = Schema(
    Any(
        {
            "voltage": Any(All(float, Range(min=0, max=300)), None),
            # BUG FIX: these fields previously used Coerce(float, Range(min=0)),
            # which passes the Range object as Coerce's ``msg`` argument -- the
            # range was silently never enforced.  Compose with All() instead.
            "power": Any(All(Coerce(float), Range(min=0)), None),
            "total": Any(All(Coerce(float), Range(min=0)), None),
            "current": Any(All(float, Range(min=0)), None),
            "voltage_mv": Any(
                All(float, Range(min=0, max=300000)), int, None
            ),  # TODO can this be int?
            "power_mw": Any(All(Coerce(float), Range(min=0)), None),
            "total_wh": Any(All(Coerce(float), Range(min=0)), None),
            "current_ma": Any(
                All(float, Range(min=0)), int, None
            ),  # TODO can this be int?
        },
        None,
    )
)
def max_num_segments():
    """Schema fragment for the required ``max_num_segments``: int in [1, 32768]."""
    segments = All(Coerce(int), Range(min=1, max=32768))
    return {Required('max_num_segments'): segments}
def argsSchema(args):
    """Convert the argument values from text to the correct type.

    On validation failure the error messages are printed and the original
    (unconverted) mapping is returned unchanged.
    """
    optional_int = Or(None, Coerce(int))
    optional_float = Or(None, Coerce(float))
    optional_str = Or(None, str)
    schema = Schema({
        '<charslist>': str,
        # FIXME: Use IsFile to check if file exists
        '<dictionary>': optional_str,
        '--lesson-number': optional_int,
        '--output': optional_str,
        '--word-wrap': optional_int,
        '--characters-per-lesson': optional_int,
        '--min-word-length': optional_int,
        '--max-word-length': optional_int,
        '--symbols-density': optional_float,
        '--previous-symbols-fraction': optional_float,
        '--numbers-density': optional_float,
        '--max-number-length': optional_int,
        '--max-letters-combination-length': optional_int,
        '--lesson-title-prefix': optional_str,
        '--crop-dict': optional_int,
        str: Boolean()  # Treat all other arguments as bool
    })
    try:
        args = schema(args)
    except error.MultipleInvalid as ex:
        print("\n".join([e.msg for e in ex.errors]))
    return args
def number_of_replicas():
    """Schema fragment for ``number_of_replicas``: int in [0, 10], default 1."""
    replica_count = All(Coerce(int), Range(min=0, max=10))
    return {Optional('number_of_replicas', default=1): replica_count}
""" from voluptuous import Coerce from voluptuous import Optional from voluptuous import Schema SBP_EXTENSION = "/*.yaml" filename = Schema(str) identifier = Schema(str) description = Schema(str) include = Schema([filename]) bit = Schema(int) type_identifier = Schema(str) sbp_identifier = Schema(int) units = Coerce(str) bitmask = Coerce(str) size = Schema(int) bitfield = Schema([{ Optional(bitmask): { Optional('units'): units, Optional('values'): [{ bit: description }], Optional('desc'): description } }]) field = Schema({ identifier: { Optional('units'): units,
def unit_count(**kwargs):
    """Schema fragment for the required ``unit_count`` option (coerced to int).

    Only consulted by the ``age`` filtertype, or by the ``space`` filtertype
    when ``use_age`` is set to True.
    """
    return {Required('unit_count'): Coerce(int)}