def load_scheme(yaml_text):
    """Parse and validate a scheme document.

    Optional keys whose defaults must be enforced are declared with
    ``default=`` here; all other optional keys are read later with
    ``dict.get()``.
    """
    prepare_schema = Map({
        Optional('remove_whitespace', default=False): Bool(),
        Optional('remove_characters', default=['']): Seq(Str()),
        Optional('strip_characters', default=['']): Seq(Str()),
    })
    validate_schema = Map({
        Optional('alphabet'): Enum(ALPHABETS),
        Optional('min_length'): Int(),
        Optional('max_length'): Int(),
    })
    directive_schema = Map({
        Optional('description'): Str(),
        'formats': Seq(Enum(FORMATS)),
        Optional('prepare'): prepare_schema,
        Optional('validate'): validate_schema,
        Optional('target'): Str(),
        Optional('helper', default=False): Bool(),
    })
    encoding_schema = Map({
        'type': Enum(ENCODINGS),
        Optional('length', default=0): Int(),
        Optional('prefix', default=''): Str(),
        Optional('separator'): Map({
            'character': Str(),
            'interval': Int(),
        }),
    })
    # The document maps arbitrary scheme names onto the scheme body.
    schema = MapPattern(Str(), Map({
        'description': Str(),
        Optional('alias'): Str(),
        'version': Str(),
        'directives': MapPattern(Str(), directive_schema),
        'algorithm': Enum(ALGORITHMS),
        'encodings': MapPattern(Str(), encoding_schema),
    }))
    return load(yaml_text, schema)
def preprocess_parameters_for_cube_creator(elem_args):
    """
    Re-validate CubeCreator parameter entries in place. This does two things:

    1) converts class_ids from
       name: class_ids@text, values: [0, 1, 2, 3]
       to
       name: class_ids, values: {"@text": [0, 1, 2, 3]}
    2) type conversion for the "values" field.

    Parameters
    ----------
    elem_args: strictyaml.YAML object (contains dict inside)

    Notes
    -----
    Mutates ``elem_args`` via ``revalidate`` and returns None — despite
    the previous docstring, no new dict is returned.
    """
    for param_portion in elem_args["parameters"]:
        name = str(param_portion["name"])
        # class_ids-style entries carry raw numeric values (scalar or list);
        # everything else is coerced through the per-parameter ARTM type table.
        if name.startswith("class_ids"):
            validator = Float() | Seq(Float())
        else:
            validator = Seq(ARTM_TYPES[name])
        param_schema = Map({
            "name": Str(),
            "values": validator
        })
        # Type-coerce this entry in place against the chosen schema.
        param_portion.revalidate(param_schema)
def _get_route_schema(cls) -> Map:
    """Build the validator for the route section."""
    climb_parts = Seq(Map({PHASE_TAG: Str()}))
    descent_parts = Seq(Map({PHASE_TAG: Str()}))
    route_map = {
        Optional("range", default=None): cls._get_value_schema(),
        Optional(CLIMB_PARTS_TAG, default=None): climb_parts,
        CRUISE_PART_TAG: cls._get_segment_schema(),
        Optional(DESCENT_PARTS_TAG, default=None): descent_parts,
    }
    return Map(route_map)
def get_type_schema_yaml_validator() -> Map:
    """Validator for the optional input/output requirement sections."""
    requirement = Map({
        "field": Enum([str(el) for el in Fields]),
        "condition": Str(),
        "value": Str() | Seq(Str()),
    })
    requirements = Seq(requirement)
    return Map({
        Optional(str(RequirementTypes.INPUT_REQUIREMENTS)): requirements,
        Optional(str(RequirementTypes.OUTPUT_REQUIREMENTS)): requirements,
    })
def _get_mapping(field: Fields, values: List[Values]) -> Map:
    """Build the per-field validator: allowed conditions plus allowed values."""
    allowed_values = Enum([str(el) for el in values])
    # A couple of fields accept either a scalar or a list of that scalar.
    if field == Fields.NUMBER_OF_COLUMNS:
        value_schema = Int() | Seq(Int())
    elif field == Fields.DATA_TYPES:
        value_schema = allowed_values | Seq(allowed_values)
    else:
        value_schema = allowed_values
    return Map({
        "field": Enum(str(field)),
        "condition": Enum([str(el) for el in field.conditions()]),
        "value": value_schema,
    })
def get_world_schema():
    """
    Getter for world schema
    :return: schema that is used to verify the world yaml
    """
    robot_schema = Map({
        'name': Str(),
        'center_x': Int(),
        'center_y': Int(),
        'orientation': Int(),
        Optional('type'): Str(),
        Optional('parts'): Seq(ConfigChecker.get_robot_part_schema()),
    })
    obstacle_schema = Map({
        'name': Str(),
        'type': Str(),
        Optional('outer_spacing'): Int(),
        Optional('depth'): Int(),
        Optional('color'): CommaSeparated(Int()),
        Optional('border_width'): Int(),
        Optional('inner_radius'): Int(),
        Optional('x'): Int(),
        Optional('y'): Int(),
        Optional('width'): Int(),
        Optional('height'): Int(),
        Optional('angle'): Int(),
        Optional('movable'): Bool(),
        Optional('hole'): Bool(),
        Optional('radius'): Int(),
    })
    return Map({
        'robots': Seq(robot_schema),
        'board_height': Int(),
        'board_width': Int(),
        'board_color': CommaSeparated(Int()),
        'obstacles': Seq(obstacle_schema),
    })
def _get_phase_mapping(cls) -> dict:
    """Mapping for one phase: the optional steps key plus the base step keys."""
    steps_entry = {
        Optional(STEPS_TAG, default=None): Seq(Map(cls._get_segment_mapping()))
    }
    # Base-step keys win on collision, matching dict-then-update precedence.
    return {**steps_entry, **cls._get_base_step_mapping()}
def _get_phase_schema(cls) -> Map:
    """Schema of the phase section."""
    # Base-part keys win on collision, matching dict-then-update precedence.
    return Map({
        Optional(PARTS_TAG, default=None): Seq(cls._get_segment_schema()),
        **cls._get_base_part_mapping(),
    })
def get_robot_schema():
    """
    Getter for robot schema
    :return: schema that is used to verify the robot yaml
    """
    part_schema = ConfigChecker.get_robot_part_schema()
    return Map({'parts': Seq(part_schema)})
def __init__(self, filename):
    """Load mock request/response pairs from a YAML config file.

    Bug fix: the original called ``path.abspath(filename)`` *before* the
    ``filename is None`` check, so ``abspath(None)`` raised a TypeError and
    the None branch was unreachable. The check now comes first.

    Exits the process with status 1 when the file cannot be read or parsed.
    """
    if filename is None:
        # No config file given: behave as an empty list of pairs.
        self._config = []
        return
    filename = path.abspath(filename)
    try:
        with open(filename, 'r') as handle:
            self._yaml = handle.read()
        self._config = load(
            self._yaml,
            Seq(
                Map({
                    Optional("name"): "name",
                    "request": Map({
                        Optional("path"): Str(),
                        Optional("method"): Enum([
                            "get", "post", "put", "delete",
                            "GET", "POST", "PUT", "DELETE",
                        ]),
                        Optional("headers"): MapPattern(Str(), Str()),
                        Optional("data"): Str(),
                    }),
                    "response": Map({
                        "content": Str() | Map({"file": Str()}),
                        Optional("code"): Int(),
                        Optional("headers"): MapPattern(Str(), Str()),
                    }),
                })))
    except Exception as e:
        sys.stderr.write(
            "Error reading YAML config file: {0}\n".format(str(e)))
        sys.exit(1)
    # Read and store all references to external content files
    # (content given as {"file": ...} rather than an inline string).
    for pair in self._config:
        content = pair.get('response', {}).get('content')
        if not isinstance(content, str) and "file" in content:
            # File paths are resolved relative to the config file.
            with open(
                    path.join(path.dirname(filename), content['file']),
                    'r') as content_file_handle:
                pair['response']['content'] = \
                    content_file_handle.read()
def build_schema_for_cubes():
    """
    Returns
    -------
    dict
        each element is str -> strictyaml.Map where the key is the name of
        a cube and the value is a schema used for validation and type-coercion
    """
    schemas = {}
    for cube_class in SUPPORTED_CUBES:
        name = cube_class.__name__
        cube_schema = build_schema_from_signature(cube_class)
        # "selection" isn't used in __init__, but we will need it later
        cube_schema["selection"] = Seq(Str())
        # shortcut for strategy initialization
        if is_key_in_schema("strategy", cube_schema):
            strategy_params = {}
            for strategy_class in SUPPORTED_STRATEGIES:
                strategy_params.update(build_schema_from_signature(strategy_class))
            cube_schema[Optional("strategy_params")] = Map(strategy_params)
        # we will deal with "values" later, but we can check at least some simple things already
        if name == "CubeCreator":
            cube_schema["parameters"] = Seq(Map({"name": Str(), "values": Seq(Any())}))
        if name == "RegularizersModifierCube":
            entry = Map({
                Optional("name"): Str(),
                Optional("regularizer"): Any(),
                Optional("tau_grid"): Seq(Float()),
            })
            cube_schema["regularizer_parameters"] = entry | Seq(entry)
        schemas[name] = Map({name: Map(cube_schema)})
    return schemas
def _get_mission_mapping(cls) -> dict:
    """Mapping for the mission section: a name plus a sequence of steps."""
    step_schema = Map({
        Optional(ROUTE_TAG, default=None): Str(),
        Optional(PHASE_TAG, default=None): Str(),
    })
    return {
        "name": Str(),
        STEPS_TAG: Seq(step_schema),
    }
def parse_args():
    """Load the machine + notes YAML configs named on the command line.

    Returns a populated ``Config``. Exits the process when the required
    config file or its currency is missing.

    Bug fix: the error branches previously called ``exit(0)``, reporting
    success to the shell; they now exit with status 1.
    """
    valid_true_values = [
        'true', '1', 't', 'y', 'yes', 'yeah', 'yup', 'certainly'
    ]

    def truthy(value):
        # Interpret a config string as a boolean flag.
        return value.lower() in valid_true_values

    # Get config file as required argument and load
    f = argument.Arguments()
    f.always("config", help="Machine Config file name")
    arguments, errors = f.parse()
    if arguments.get("config") is None:
        print("Config file must be specified")
        exit(1)  # was exit(0): an error must not signal success
    machine_config = strictyaml.load(
        Path("machine_config/%s.yaml" % arguments.get("config")).bytes().decode('utf8')).data
    if not machine_config.get("currency"):
        print("Currency must be specified")
        exit(1)  # was exit(0)
    schema = Map({"denominations": Seq(Str())})
    notes_config = strictyaml.load(
        Path("machine_config/notes_config/%s.yaml"
             % machine_config.get("currency")).bytes().decode('utf8'),
        schema).data

    config = Config()
    config.NAME = machine_config.get("name")
    config.BASE_CURRENCY = machine_config.get("currency")
    config.DEBUG = truthy(machine_config.get("debug"))
    config.CAMERA_METHOD = machine_config.get("camera_method")
    config.ZBAR_VIDEO_DEVICE = machine_config.get("camera_device")
    config.RELAY_METHOD = machine_config.get("relay_method")
    config.MOCK_VALIDATOR = truthy(machine_config.get("mock_validator"))
    config.ZMQ_URL_MOCK_VALIDATOR = machine_config.get("zmq_url_mock_validator")
    config.NOTE_VALIDATOR_NV11 = truthy(machine_config.get("validator_nv11"))
    config.VALIDATOR_PORT = machine_config.get("validator_port")
    config.ZMQ_URL_PRICEFEED = machine_config.get("zmq_url_pricefeed")
    config.NOTES_VALUES = notes_config.get("denominations")
    config.ZMQ_URL_RPC = machine_config.get("zmq_url_rpc")
    config.ZMQ_URL_STATUS = machine_config.get("zmq_url_status")
    config.IS_FULLSCREEN = truthy(machine_config.get("is_fullscreen"))
    config.DEFAULT_SLIPPAGE = machine_config.get("default_slippage")
    config.BUY_LIMIT = int(machine_config.get("buy_limit"))
    # Relay hardware only exists on ARM (Raspberry Pi); disable elsewhere.
    if not os.uname()[4].startswith("arm"):
        config.RELAY_METHOD = RelayMethod.NONE
    return config
def __init__(self, config_file):
    """Read and validate the YAML config, then expose every option as a
    ``<section>_<key>`` attribute on this object."""
    # yaml schema
    schema = Map({
        "api": Map({"url": Str(), "query": Str()}),
        "dir": Map({"anime": Str(), "exclude": Seq(Str())}),
    })
    # open yaml file and load
    self.config_file = config_file
    with open(config_file, "r", encoding="utf8") as fh:
        self.data = load(fh.read(), schema)
    # adding attributes to config, e.g. api_url, dir_anime
    for section, options in self.data.data.items():
        for option, value in options.items():
            setattr(self, section + "_" + option, value)
def is_pipelines_config_valid(strictyaml_pipelines: YAML) -> bool:
    """Return True when the parsed pipelines YAML matches the expected schema.

    Bug fix: the return annotation previously said ``-> YAML`` although the
    function returns a bool.

    TODO: Refactor to test and analyzer specific config validation.
    """
    # "dirs" and "files" entries share the same shape: a path plus an
    # optional full-scope flag defaulting to False.
    scope_entry = Seq(
        Map({
            "path": Str(),
            Optional("full-scope", default=False): Bool()
        }))
    pipelines_schema = Map({
        "pipelines": Seq(
            Map({
                "name": Str(),
                "type": Enum(["test", "analyzer"]),
                Optional("coverage"): Str(),
                Optional("commands"): Map({
                    "partial-scope": Str(),
                    "full-scope": Str()
                }),
                Optional("dirs"): scope_entry,
                Optional("files"): scope_entry,
            }))
    })
    try:
        strictyaml_pipelines.revalidate(pipelines_schema)
        return True
    except YAMLValidationError:
        return False
def get_schema(snippet):
    """Recursively translate a JSON-Schema-like snippet into a strictyaml validator.

    Bug fix: the required/optional logic was inverted — keys listed in the
    snippet's "required" list were wrapped in Optional() while all other
    keys were made mandatory. Per JSON Schema, "required" names the keys
    that MUST be present; everything else may be absent.
    """
    if snippet['type'] == "integer":
        return Int()
    elif snippet['type'] == "string":
        return Str()
    elif snippet['type'] == "array":
        # Arrays are homogeneous: validate every element against "items".
        return Seq(get_schema(snippet["items"]))
    elif snippet['type'] == "object":
        map_schema = {}
        required = snippet.get('required', [])
        for key, subschema in snippet['properties'].items():
            if key in required:
                map_schema[key] = get_schema(subschema)
            else:
                map_schema[Optional(key)] = get_schema(subschema)
        return Map(map_schema)
class Engine(hitchpylibrarytoolkit.Engine):
    """Story engine: prepares the build and file preconditions for each story."""

    # Environments a story may declare in its info section.
    info_definition = InfoDefinition(environments=InfoProperty(
        Seq(Enum(["gui", "mac", "docker", "headless", "wsl"]))), )

    def set_up(self):
        """Ensure the project is built and materialize the story's 'files' givens."""
        self._build.ensure_built()
        for filename, contents in self.given.get('files', {}).items():
            filepath = self._build.working.parent.joinpath(filename)
            # Create missing parent directories before writing the file.
            if not filepath.dirname().exists():
                filepath.dirname().makedirs()
            filepath.write_text(contents)

    def screenshot_exists(self, filename):
        """Assert that the named screenshot was produced in the working dir."""
        assert self._build.working.joinpath(filename).exists()
def __init__(self, config_file):
    """Read and validate the YAML config file, keeping the parsed document."""
    # yaml schema
    schema = Map({
        "api": Map({"url": Str(), "query": Str()}),
        "dir": Map({"anime": Str(), "exclude": Seq(Str())}),
    })
    # open yaml file and load
    self.config_file = config_file
    with open(config_file, "r", encoding="utf8") as fh:
        raw = fh.read()
    self.data = load(raw, schema)
def _get_mission_schema(cls) -> Map:
    """Schema of the mission section."""
    reserve_schema = Map({
        "ref": Str(),
        "multiplier": Float() | Str(),
    })
    part_schema = Map({
        Optional(ROUTE_TAG, default=None): Str(),
        Optional(PHASE_TAG, default=None): Str(),
        Optional(RESERVE_TAG, default=None): reserve_schema,
    })
    return Map({PARTS_TAG: Seq(part_schema)})
def config() -> Config:
    """Load, validate and materialize config.yaml into a Config object.

    Exits the process when the config file does not exist.
    """
    CONFIG_DIR.mkdir(parents=True, exist_ok=True)
    config_file = CONFIG_DIR / 'config.yaml'
    if not config_file.exists():
        sys.exit(f'{config_file} not found')
    schema = Map({
        'restaurants': Seq(Map({
            'url': Str(),
            'name': Str(),
        })),
        'mail': Map({
            'enable': Bool(),
            'sender': Str(),
            'password': Str(),
            'smtp_server': Str(),
            'recipients': UniqueSeq(Str()),
        }),
    })
    parsed = load(config_file.read_text(), schema)
    cfg = Config()
    for entry in parsed.get('restaurants', []):
        cfg.restaurants.append(Restaurant(entry['url'], entry['name']))
    mail = parsed['mail']
    cfg.mail = Mail(
        enable=mail['enable'],
        sender=mail['sender'],
        password=mail['password'],
        smtp_server=mail['smtp_server'],
        recipients=mail['recipients'],
    )
    return cfg
#tornado_server.bind(server.port) #tornado_server.start(2) #sys.stdout.write(message) #sys.stdout.write("\n") #sys.stdout.flush() #tornado.ioloop.IOLoop.instance().start(1) SCHEMA = Seq( Map({ "request": Map({ "path": Str(), "method": Enum(["get", "post", "put", "delete", "options"]), }), "response": Map({ "code": Int(), "content": Str(), }), }) ) class Response(object): def __init__(self, code, content): self._code = code self._content = content @property def code(self): return self._code
from strictyaml import Map, MapPattern, Optional from strictyaml import Str, Int, Seq, Enum, Any, as_document JSONSCHEMA_TYPE_SNIPPET = { "type": Enum(["object", "integer", "string", "array"]), Optional("required"): Seq(Str()), Optional("properties"): MapPattern(Str(), Any()), Optional("items"): Any(), } JSONSCHEMA_SCHEMA = Map(JSONSCHEMA_TYPE_SNIPPET) def get_schema(snippet): if snippet['type'] == "integer": return Int() elif snippet['type'] == "string": return Str() elif snippet['type'] == "array": return Seq(get_schema(snippet["items"])) elif snippet['type'] == "object": map_schema = {} for key, subschema in snippet['properties'].items(): if key in snippet.get('required', []): map_schema[Optional(key)] = get_schema(subschema) else: map_schema[key] = get_schema(subschema) return Map(map_schema) def load_schema(json_schema):
Map({ "input": Str(), Optional("targetName"): Str() }), Optional(ModelMetadataKeys.MODEL_ID): Str(), Optional(ModelMetadataKeys.DESCRIPTION): Str(), Optional(ModelMetadataKeys.MAJOR_VERSION): Bool(), Optional(ModelMetadataKeys.INFERENCE_MODEL): Map({ Optional("targetName"): Str(), Optional("positiveClassLabel"): Str(), Optional("negativeClassLabel"): Str(), Optional("classLabels"): Seq(Str()), Optional("classLabelsFile"): Str(), Optional("predictionThreshold"): Int(), }), Optional(ModelMetadataKeys.TRAINING_MODEL): Map({Optional("trainOnProject"): Str()}), Optional(ModelMetadataKeys.HYPERPARAMETERS): Any(), Optional(ModelMetadataKeys.VALIDATION_SCHEMA): get_type_schema_yaml_validator(), Optional(ModelMetadataKeys.CUSTOM_PREDICTOR): Any(), }) def validate_config_fields(model_config, *fields):
# -*- coding:utf-8 -*- # author: Xinge from pathlib import Path from strictyaml import Bool, Float, Int, Map, Seq, Str, as_document, load model_params = Map({ "model_architecture": Str(), "output_shape": Seq(Int()), "fea_dim": Int(), "out_fea_dim": Int(), "num_class": Int(), "num_input_features": Int(), "use_norm": Bool(), "init_size": Int(), }) dataset_params = Map({ "dataset_type": Str(), "pc_dataset_type": Str(), "ignore_label": Int(), "return_test": Bool(), "fixed_volume_space": Bool(), "label_mapping": Str(), "max_volume_space": Seq(Float()), "min_volume_space": Seq(Float()), }) train_data_loader = Map({ "data_path": Str(),
class WooSchema:
    """Schema for localization YAML files."""

    # Product page metadata.
    # https://github.com/woocart/woocart-defaults/blob/master/src/importers/class-woopage.php#L14
    productMeta = {
        "title": Str(),
        "description": Str(),
        Optional("price"): Str(),
        Optional("category"): Str(),
        "images": Seq(Str()),
    }

    # WordPress page/post metadata.
    # https://github.com/woocart/woocart-defaults/blob/master/src/importers/class-woopage.php#L14
    pageMeta = {
        "post_title": Str(),
        Optional("post_name"): Str(),
        Optional("post_excerpt"): Str(),
        "post_status": Enum(["draft", "publish"]),
        "post_type": Enum(["page", "post"]),
        Optional("post_category"): Str(),
        Optional("meta_input"): MapPattern(Str(), Str()),
        Optional("woocart_defaults"): MapPattern(Str(), Str()),
    }

    # Per-store localization options (WordPress + WooCommerce settings).
    localization = {
        "woo/woocommerce_default_country": Enum(COUNTRIES),
        "wp/date_format": Enum(["d/m/Y", "Y-m-d", "F j, Y", "m/d/Y"]),
        "wp/time_format": Enum(["H:i", "g:i A"]),
        "wp/start_of_week": Enum(["1", "2", "3", "4", "5", "6", "7"]),
        "wp/timezone_string": Enum(TIMEZONES),
        "wp/blog_charset": Enum(["UTF-8"]),
        "wp/DEFAULT_WPLANG": Enum(WPLANGS),
        Optional("wp/blogdescription"): Str(),
        Optional("wp/woocommerce_demo_store_notice"): Str(),
        "woo/woocommerce_weight_unit": Enum(["kg", "k", "lbs", "oz"]),
        "woo/woocommerce_dimension_unit": Enum(["m", "cm", "mm", "in", "yd"]),
        "woo/woocommerce_currency": Enum(CURRENCIES),
        "woo/woocommerce_currency_pos": Enum(["right_space", "left_space", "left", "right"]),
        "woo/woocommerce_price_thousand_sep": Enum([".", ","]),
        "woo/woocommerce_price_decimal_sep": Enum([",", "."]),
        "woo/woocommerce_price_num_decimals": Enum(["2"]),
        Optional("woo/woocommerce_tax_classes"): Seq(Str()),
        # Bank-transfer gateway settings.
        "woo/woocommerce_bacs_settings": Map({
            "enabled": Bool(),
            Optional("title"): Str(),
            Optional("description"): Str(),
            Optional("instructions"): Str(),
            Optional("account_name"): Str(),
            Optional("account_number"): Str(),
            Optional("sort_code"): Str(),
            Optional("bank_name"): Str(),
            Optional("iban"): Str(),
            Optional("bic"): Str(),
            Optional("account_details"): Str(),
        }),
        # Cash-on-delivery gateway settings.
        "woo/woocommerce_cod_settings": Map({
            "enabled": Bool(),
            Optional("title"): Str(),
            Optional("description"): Str(),
            Optional("instructions"): Str(),
            Optional("enable_for_methods"): Str(),
            Optional("enable_for_virtual"): Bool(),
        }),
        "woo/woocommerce_checkout_privacy_policy_text": Str(),
        "woo/woocommerce_registration_privacy_policy_text": Str(),
        ".woo/woocommerce_bacs_settings_format": Enum(["serialized"]),
        ".woo/woocommerce_cod_settings_format": Enum(["serialized"]),
        Optional(".woo/woocommerce_tax_classes_format"): Enum(["implode_newline"]),
    }

    @staticmethod
    def load(path: Path, schema_pointer):
        """Load and validate .yaml file."""
        # Work on a copy: the schema is mutated per-file below.
        schema = copy.deepcopy(schema_pointer)
        with path.open() as f:
            yaml = f.read()
        # First parse permissively so the raw data can drive schema tweaks.
        data = yaml_load(yaml, Any())
        is_template = path.name == "template.yaml"

        # Replace real Country and Timezone values with fakes
        if is_template:
            schema["woo/woocommerce_default_country"] = Enum(["LL"])
            schema["wp/timezone_string"] = Enum(["Region/Country"])
            schema["wp/DEFAULT_WPLANG"] = Enum(["ll_LL"])
            schema["woo/woocommerce_currency"] = Enum(["LLL"])

        if "woo/woocommerce_tax_classes" in data:
            # Inspect that tax classes and taxes match
            # create enum for taxes from defined tax_classes
            tax_classes = [
                str(tax).lower().replace(" ", "-")
                for tax in data["woo/woocommerce_tax_classes"]
            ]
            # +1 is for standard schema which is never defined in tax class
            for x in range(len(tax_classes) + 1):
                # start counting with 1
                schema[f"wootax/{x+1}"] = Map({
                    "country": Enum(["LL"]) if is_template else Enum(COUNTRIES),
                    "state": Str(),
                    "rate": Decimal(),
                    "name": Str(),
                    "priority": Int(),
                    "compound": Int(),
                    "shipping": Int(),
                    "order": Int(),
                    # Index 0 is the implicit "standard" class (empty name).
                    "class": Enum([""]) if x == 0 else Enum(tax_classes),
                    "locations": Map({}),
                })
        try:
            return yaml_load(yaml, Map(schema), path)
        except YAMLError:
            raise
        # NOTE(review): unreachable — the try block either returns or re-raises.
        return as_document(schema)

    @staticmethod
    def load_string(data: bytes, schema, path: str):
        """Load and validate yaml data."""
        try:
            return yaml_load(data, Map(schema), path)
        except YAMLError:
            raise
        # NOTE(review): unreachable — the try block either returns or re-raises.
        return as_document(schema)
from pathlib import Path from textwrap import dedent import pytest from lxml import html from strictyaml import Enum, Map, Optional, Seq, Str, Url from juniorguru.scrapers.pipelines import sections_parser from juniorguru.scrapers.pipelines.sections_parser import (ListSection, TextFragment) from utils import (load_yaml, param_startswith_skip, param_xfail_missing, startswith_skip) schema = Seq( Map({ Optional('heading'): Str(), 'type': Enum(['paragraph', 'list']), 'contents': Seq(Str()), })) def generate_params(fixtures_dirname): for html_path in (Path(__file__).parent / fixtures_dirname).rglob('*.html'): if startswith_skip(html_path): yield param_startswith_skip(path) else: yml_path = html_path.with_suffix('.yml') if startswith_skip(yml_path): yield param_startswith_skip(path) elif yml_path.is_file(): yield pytest.param(html_path.read_text(),
"executionTimeout": None, "killTimeout": 30, "statsd": None, } _report_schema = Map({ Opt("sentry"): Map({ Opt("dsn"): Map({ Opt("value"): EmptyNone() | Str(), Opt("fromFile"): EmptyNone() | Str(), Opt("fromEnvVar"): EmptyNone() | Str(), }), Opt("fingerprint"): Seq(Str()), Opt("level"): Str(), Opt("extra"): MapPattern(Str(), Str() | Int() | Bool()), Opt("body"): Str(), }), Opt("mail"): Map({ "from": EmptyNone() | Str(), "to": EmptyNone() | Str(), Opt("smtpHost"): Str(), Opt("smtpPort"): Int(), Opt("subject"): Str(),
from .cubes import PerplexityStrategy, GreedyStrategy from .model_constructor import init_simple_default_model import artm from inspect import signature, Parameter from strictyaml import Map, Str, Int, Seq, Any, Optional, Float, EmptyNone, Bool from strictyaml import dirty_load # TODO: use stackoverflow.com/questions/37929851/parse-numpydoc-docstring-and-access-components # for now just hardcode most common / important types ARTM_TYPES = { "tau": Float(), "topic_names": Str() | Seq(Str()) | EmptyNone(), # TODO: handle class_ids in model and in regularizers separately "class_ids": Str() | Seq(Str()) | EmptyNone(), "gamma": Float() | EmptyNone(), "seed": Int(), "num_document_passes": Int(), "num_processors": Int(), "cache_theta": Bool(), "reuse_theta": Bool(), "theta_name": Str() } element = Any() base_schema = Map({ 'regularizers': Seq(element),
from pathlib import Path

from strictyaml import Map, Optional, Seq, Str, Url, load

from juniorguru.models import Supporter, db


# Each supporter record: a required name plus an optional URL.
schema = Seq(Map({
    'name': Str(),
    Optional('url'): Url(),
}))


def main():
    """Rebuild the Supporter table from data/supporters.yml."""
    path = Path(__file__).parent.parent / 'data' / 'supporters.yml'
    # Derive last_name (last whitespace-separated token of the name)
    # alongside the raw validated fields.
    records = [
        dict(last_name=record.data['name'].split()[-1], **record.data)
        for record in load(path.read_text(), schema)
    ]
    with db:
        # Full rebuild: drop, recreate, then insert every record.
        Supporter.drop_table()
        Supporter.create_table()
        for record in records:
            Supporter.create(**record)


if __name__ == '__main__':
    main()
"SCHEMA", # Keys to skip. "KEYS_TO_SKIP_IN_COG_INFO", # Key order. "COG_KEYS_ORDER", "REPO_KEYS_ORDER", "SHARED_FIELDS_KEYS_ORDER", ) #: `repo` metadata keys. REPO_KEYS = { "name": Str(), # Downloader doesn't use this, but cogs.red might. "short": Str(), "description": Str(), "install_msg": Str(), "author": Seq(Str()), } #: Metadata keys common to `shared_fields` and `cogs` schemas. COMMON_KEYS = { Optional("min_bot_version"): RedVersion(), Optional("max_bot_version"): RedVersion(), Optional("min_python_version"): PythonVersion(), Optional("hidden", False): Bool(), Optional("disabled", False): Bool(), Optional("type", "COG"): Enum(["COG", "SHARED_LIBRARY"]), } #: `shared_fields` metadata keys. SHARED_FIELDS_KEYS = { "install_msg": Str(),