def parse_tsrc_config(config_path: Path = None, roundtrip: bool = False) -> Config:
    auth_schema = {
        schema.Optional("gitlab"): {"token": str},
        schema.Optional("github"): {"token": str},
    }
    tsrc_schema = schema.Schema({"auth": auth_schema})
    if not config_path:
        config_path = get_tsrc_config_path()
    return parse_config(config_path, tsrc_schema, roundtrip=roundtrip)

def main() -> None: args_schema = schema.Schema({ "<spine_path>": schema.Use(pathlib.Path, os.path.exists), "<output_path>": schema.Use(pathlib.Path), "--keep_rate": schema.Use(float), schema.Optional("--log_every_n"): schema.Or(None, schema.Use(int)), "--help": bool, }) args = args_schema.validate(docopt.docopt(__doc__)) gen_ids_from_spine(args)
def main() -> None: args_schema = schema.Schema( { "<input_path>": schema.Use(pathlib.Path, os.path.exists), "<new_output_path>": schema.Or( None, schema.Use(pathlib.Path, os.path.exists) ), "--replace": str, "--accept_all": bool, "--from": schema.Or(None, str), "--help": bool, } ) args = args_schema.validate(docopt.docopt(__doc__)) gen_config(args)
class ExpressionTransformation(BaseTransformation):
    schema = schema.Schema({"field": str, "expr": str})

    def apply(self, row: Dict, **kwargs):
        result = row.get(self.args["field"], None)
        if result is not None:
            f = Symbol("_value_")
            sympy_exp = parse_expr(self.args["expr"])
            result = sympy_exp.evalf(subs={f: result})
        try:
            return float(result)
        except (TypeError, ValueError):
            # Not a numeric result (e.g. the field was missing): return as-is.
            return result

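# Standalone sketch of the sympy mechanism used by apply() above: parse an
# expression once, then evaluate it numerically with the row value bound to
# the reserved "_value_" symbol. The expression and value here are
# illustrative, not taken from the original code.
from sympy import Symbol
from sympy.parsing.sympy_parser import parse_expr

value = Symbol("_value_")
expression = parse_expr("_value_ * 1.2 + 3")
print(float(expression.evalf(subs={value: 10.0})))  # 15.0
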
def test_roundtrip(tmp_path: Path) -> None:
    foo_yml = tmp_path / "foo.yml"
    contents = textwrap.dedent("""\
        # important comment
        foo: 0
        """)
    foo_yml.write_text(contents)
    foo_schema = schema.Schema({"foo": int})
    parsed = tsrc.parse_config(foo_yml, foo_schema, roundtrip=True)
    parsed["foo"] = 42
    tsrc.dump_config(parsed, foo_yml)
    actual = foo_yml.read_text()
    expected = contents.replace("0", "42")
    assert actual == expected

def delete(self, objective_id, by_user):
    """Delete an objective.

    :param objective_id: is the id of the `Objective` to be deleted.
    :param by_user: the `User` who is deleting the `Objective`.
    """
    delete_schema = {'id': s.Use(int)}
    o = s.Schema(delete_schema).validate({'id': objective_id})
    # DJG - do I need to be using schema here? I just want to check the
    # single id data item.
    objective = self.require_by_id(o['id'])
    self._check_delete_auth(objective, by_user)
    db.session.delete(objective)
    db.session.commit()

def validate(data, set_dir):
    def is_file(path):
        return path.is_file()

    def to_fixture(data):
        return Fixture(**data)

    def to_step(data):
        return Step(**data)

    def to_test(data):
        return Test(**data)

    def absolute_path(path):
        absolute = Path(os.path.expanduser(path))
        if not absolute.is_absolute():
            absolute = (set_dir / path).resolve()
        return absolute

    def to_command(raw_command):
        return shlex.split(raw_command.replace('@.', str(set_dir)))

    fixture = schema.Schema(
        schema.And(
            {
                'enter': schema.And(str, len),
                'exit': schema.And(str, len),
            },
            schema.Use(to_fixture)))
    fixtures = schema.Schema({schema.And(str, len): fixture})
    step = schema.Schema(
        schema.And(
            {
                'command': schema.And(
                    schema.Const(schema.And(str, len)),
                    schema.Use(to_command)),
                schema.Optional('input', default=None): schema.And(
                    schema.Use(absolute_path), is_file),
            },
            schema.Use(to_step)))
    test = schema.Schema(
        schema.And(
            {
                schema.Optional('tags', default=None): [str],
                schema.Optional('condition', default=None): str,
                schema.Optional('fixture', default=None): str,
                'steps': [step],
            },
            schema.Use(to_test)))
    tests = schema.Schema({schema.And(str, len): test})
    sch = schema.Schema({
        schema.Optional('config-file', default=None): schema.Use(absolute_path),
        schema.Optional('fixtures', default=None): fixtures,
        'tests': tests,
    })
    return sch.validate(data)

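# Hedged example of a test-set document that validate() above would accept
# (the paths and commands are made up; Fixture, Step and Test come from the
# surrounding module). Plain strings are converted in place: each 'command'
# becomes an argv list via shlex, and each mapping collapses into a Step,
# Test or Fixture instance.
example = {
    'fixtures': {
        'server': {'enter': 'start @./server', 'exit': 'stop @./server'},
    },
    'tests': {
        'smoke': {'fixture': 'server', 'steps': [{'command': 'echo hello'}]},
    },
}
suite = validate(example, Path('/tmp/testset'))
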
def _format_tyre_dimensions(tyre_dimensions):
    frt = schema.Schema({
        schema.Optional('additional_marks'): schema.Use(str),
        'aspect_ratio': schema.Use(float),
        schema.Optional('carcass'): schema.Use(str),
        'rim_diameter': schema.Use(float),
        schema.Optional('diameter'): schema.Use(float),
        schema.Optional('load_index'): schema.Use(str),
        schema.Optional('load_range'): schema.Use(str),
        'nominal_section_width': schema.Use(float),
        schema.Optional('speed_rating'): schema.Use(str),
        schema.Optional('use'): schema.Use(str),
        schema.Optional('code'): schema.Use(str),
    })
    m = {k: v for k, v in tyre_dimensions.items() if v is not None}
    return frt.validate(m)

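# Hedged example call for _format_tyre_dimensions() above (the values are
# made up): numeric fields are coerced to float by schema.Use, and None
# entries are dropped before validation.
_format_tyre_dimensions({
    'nominal_section_width': '205',
    'aspect_ratio': '55',
    'rim_diameter': '16',
    'carcass': 'R',
    'load_index': None,  # dropped by the None filter
})
# -> {'nominal_section_width': 205.0, 'aspect_ratio': 55.0,
#     'rim_diameter': 16.0, 'carcass': 'R'}
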
def validate_repo(data: Any) -> None:
    copy_schema = {"src": str, schema.Optional("dest"): str}
    remote_schema = {"name": str, "url": str}
    repo_schema = schema.Schema({
        "src": str,
        schema.Optional("branch"): str,
        schema.Optional("copy"): [copy_schema],
        schema.Optional("sha1"): str,
        schema.Optional("tag"): str,
        schema.Optional("remotes"): [remote_schema],
        schema.Optional("url"): str,
    })
    repo_schema.validate(data)
    if ("url" in data) and ("remotes" in data):
        raise schema.SchemaError(
            "Repo config cannot contain both a url and a list of remotes")

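# Hedged usage of validate_repo() above: a well-formed repo config passes
# silently, while combining "url" with "remotes" raises schema.SchemaError
# (the URLs are made up).
validate_repo({"src": "foo", "url": "git@example.com:foo.git"})
try:
    validate_repo({
        "src": "foo",
        "url": "git@example.com:foo.git",
        "remotes": [{"name": "origin", "url": "git@example.com:foo.git"}],
    })
except schema.SchemaError as error:
    print(error)
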
def meta_schema(self) -> schema.Schema:
    """The schema with which the recipe configuration file (``recipe.toml``)
    must comply for the common part (*i.e.* the part of the configuration
    common to all the recipes)."""
    return schema.Schema({
        "features": [
            schema.Or(*(
                featclass.NAME
                for featclass in features.RecipeFeature.__subclasses__())),
        ],
        # other keys must be dict and will be validated by recipe features
        # validation methods
        str: dict,
    })

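# Standalone sketch of the schema.Or(*(...)) pattern used above: build an
# Or() over the NAME of every registered subclass so that only known feature
# names validate. The classes here are illustrative stand-ins for
# features.RecipeFeature and its subclasses.
import schema


class BaseFeature:
    NAME = ""


class FeatureA(BaseFeature):
    NAME = "feature-a"


class FeatureB(BaseFeature):
    NAME = "feature-b"


known_names = schema.Or(*(cls.NAME for cls in BaseFeature.__subclasses__()))
schema.Schema({"features": [known_names]}).validate({"features": ["feature-a"]})
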
def test_post_with_invalid_id(self) -> None:
    assert Product.objects.count() == 0
    for item in ["1", 1]:
        resp = self.superuser_client.post("/product", {"id": item})
        self.assertEqual(400, resp.status_code)
        data = resp.json()
        schema.Schema(
            {
                "code": "validation_error",
                "message": str,
                "fields": {
                    "id": str,
                },
                "non_field": str,
            },
        ).validate(data)
        self.assertEqual(0, Product.objects.count())

def meta_schema(self) -> schema.Schema:
    """The schema with which the recipe configuration file (``recipe.toml``)
    must comply for the common part (*i.e.* the part of the configuration
    common to all the recipes)."""
    return schema.Schema({
        "features": [
            schema.Or(*(
                featclass.NAME
                for featclass in features.RecipeFeature.__subclasses__())),
        ],
        # All recipes have an SDK except for the SDK recipes themselves
        schema.Optional("sdk"): schema.Regex(RECIPE_IDENTIFIER_RE.pattern),
        # other keys must be dict and will be validated by recipe features
        # validation methods
        str: dict,
    })

def on_put(self, req, res, userId):
    data = req.stream.read()
    data = json.loads(data)
    sche = schema.Schema("book_reserve")
    sche.validate(data)
    book = BookDB()
    ret = book.book_unreserve(userId, data['books'])
    if not ret:
        res.status = httplib.OK
        res.body = "All books unreserved."
    else:
        res.status = httplib.INTERNAL_SERVER_ERROR
        res.body = "Books failed to unreserve: %s" % ret

def test_patch_invalid_keys(self) -> None:
    resp = self.superuser_client.patch(
        f"/product/{self.pr.id}",
        {"xxxxx": "product_2", "brand_id": str(self.br.id)},
    )
    data = resp.json()
    schema.Schema(
        {
            "code": "validation_error",
            "message": str,
            "fields": {
                "xxxxx": str,
            },
            "non_field": str,
        },
    ).validate(data)

def import_from_yaml(text: str = None, filepath: str = None, *,
                     ignore_schema: bool = False,
                     ignore_validation: bool = False) -> Statechart:
    """
    Import a statechart from a YAML representation (first argument) or a YAML
    file (filepath argument).

    Unless specified, the structure contained in the YAML is validated against
    a predefined schema (see *sismic.io.SCHEMA*), and the resulting statechart
    is validated using its *validate()* method.

    :param text: A YAML text. If not provided, filepath argument has to be provided.
    :param filepath: A path to a YAML file.
    :param ignore_schema: set to *True* to disable yaml validation.
    :param ignore_validation: set to *True* to disable statechart validation.
    :return: a *Statechart* instance
    """
    if not text and not filepath:
        raise TypeError(
            'A YAML must be provided, either using first argument or filepath argument.')
    elif text and filepath:
        raise TypeError('Either provide first argument or filepath argument, not both.')
    elif filepath:
        with open(filepath, 'r') as f:
            text = f.read()

    if yaml.version_info < (0, 15):
        data = yaml.safe_load(text)  # type: dict
    else:
        yml = yaml.YAML(typ='safe', pure=True)
        data = yml.load(text)

    if not ignore_schema:
        try:
            data = schema.Schema(SCHEMA.statechart).validate(data)
        except schema.SchemaError as e:
            raise StatechartError('YAML validation failed') from e

    sc = import_from_dict(data)

    if not ignore_validation:
        sc.validate()
    return sc

def main(_):
    schema_path = os.path.join(_DIR_PATH, FLAGS.schema_file_name)
    schemas = schema.Schema(schema_path)
    processor = Processor(schemas)
    data_path = os.path.join(FLAGS.input_data_dir, 'data.json')
    with tf.io.gfile.GFile(data_path, 'r') as f:
        data = json.load(f)
    dev_test_ids = []
    output_dir = FLAGS.output_dir or _DIR_PATH
    # Generate dev and test sets according to the ids listed in the files.
    # Ids not included in the dev and test id list files belong to the
    # training set.
    for output_dir_name, file_name in _PATH_MAPPING:
        output_sub_dir = os.path.join(output_dir, output_dir_name)
        if not tf.io.gfile.exists(output_sub_dir):
            tf.io.gfile.makedirs(output_sub_dir)
        schema_path = os.path.join(output_sub_dir, 'schema.json')
        schemas.save_to_file(schema_path)
        dial_ids = []
        if file_name:
            id_list_path = os.path.join(FLAGS.input_data_dir, file_name)
            with tf.io.gfile.GFile(id_list_path) as f:
                dial_ids = [id_name.strip() for id_name in f.readlines()]
            dev_test_ids.extend(dial_ids)
        else:
            # Generate the ids for the training set.
            dial_ids = list(set(data.keys()) - set(dev_test_ids))
        converted_dials = processor.convert_to_dstc(dial_ids, data)
        logging.info('Unfound slot span ratio %s',
                     processor.unfound_slot_span_ratio)
        logging.info('Writing %d dialogs to %s', len(converted_dials),
                     output_sub_dir)
        for i in range(0, len(converted_dials), _NUM_DIALS_PER_FILE):
            file_index = int(i / _NUM_DIALS_PER_FILE) + 1
            # Create a new json file and save the dialogues.
            json_file_path = os.path.join(
                output_sub_dir, 'dialogues_{:03d}.json'.format(file_index))
            dialogs_list = converted_dials[
                (file_index - 1) * _NUM_DIALS_PER_FILE:
                file_index * _NUM_DIALS_PER_FILE]
            with tf.io.gfile.GFile(json_file_path, 'w') as f:
                json.dump(dialogs_list, f, indent=2,
                          separators=(',', ': '), sort_keys=True)
            logging.info('Created %s with %d dialogues.', json_file_path,
                         len(dialogs_list))

def __init__(self, dvc_dir):
    self.dvc_dir = os.path.abspath(os.path.realpath(dvc_dir))
    self.config_file = os.path.join(dvc_dir, self.CONFIG)
    try:
        self._config = configobj.ConfigObj(self.config_file)
        # NOTE: schema doesn't support ConfigObj.Section validation, so we
        # need to convert our config to dict before passing it to schema.
        self._config = self._lower(self._config)
        self._config = schema.Schema(self.SCHEMA).validate(self._config)
        # NOTE: now converting back to ConfigObj
        self._config = configobj.ConfigObj(self._config,
                                           write_empty_values=True)
        self._config.filename = self.config_file
    except Exception as ex:
        raise ConfigError(ex)

def __pulgas_from_config__(cls, config):
    spec = schema.Or(
        text_type,  # Should be a version str
        pulgas.sub(Pipfile.PackageSpec))
    my_schema = schema.Schema({text_type: spec})
    validated_config = my_schema.validate(config)

    def to_spec(value):
        if isinstance(value, text_type):
            return Pipfile.PackageSpec(version=value)
        return value

    packages = {
        name: to_spec(value)
        for name, value in validated_config.items()
    }
    return cls(packages=packages)

def load(manifest_path: Path) -> Manifest:
    gitlab_schema = {"url": str}
    repo_schema = schema.Use(validate_repo)
    group_schema = {"repos": [str], schema.Optional("includes"): [str]}
    manifest_schema = schema.Schema(
        {
            "repos": [repo_schema],
            schema.Optional("gitlab"): gitlab_schema,
            schema.Optional("groups"): {str: group_schema},
        }
    )
    parsed = tsrc.parse_config(manifest_path, manifest_schema)
    parsed = ManifestConfig(parsed)  # type: ignore
    as_manifest_config = cast(ManifestConfig, parsed)
    res = Manifest()
    res.load(as_manifest_config)
    return res

def ValidateOptionsFiles(self, schemafile, testDir, depth, extension=None,
                         xmlRootNode=None, ignoreValidXMLCheck=False):
    debug.dprint("Validating options file against schema: " + schemafile, 1)
    schemafile = os.path.join(self._rootDir, schemafile)
    self.sch = schema.Schema(schemafile)
    if extension is not None:
        debug.dprint("Testing files with extension: " + extension, 1)
        for filename in self._TestFiles(extension, testDir, depth):
            self._TestSingle_file(filename, ignoreValidXMLCheck)
    if xmlRootNode is not None:
        debug.dprint("Testing xml files with root node: " + xmlRootNode, 1)
        for filename in self._TestFiles("xml", testDir, depth):
            try:
                xmlParse = xml.dom.minidom.parse(filename)
            except xml.parsers.expat.ExpatError:
                debug.dprint(filename + " : Fail", 1)
                self._optionErrors[filename] = (0, 0, 0, 0)
                continue
            rootEles = xmlParse.getElementsByTagName(xmlRootNode)
            if len(rootEles) == 0:
                continue
            optionsTree = self.sch.read(filename, root=xmlRootNode, stub=True)
            lost_eles, added_eles, lost_attrs, added_attrs = self.sch.read_errors()
            if len(lost_eles) + len(added_eles) + len(lost_attrs) + len(added_attrs) == 0:
                debug.dprint(filename + " : Pass", 1)
                self._passes += 1
            else:
                debug.dprint(filename + " : Fail", 1)
                self._optionErrors[filename] = (lost_eles, added_eles,
                                                lost_attrs, added_attrs)
    return

def main() -> None: args_schema = schema.Schema({ schema.Optional("<input_path>"): schema.Or(None, schema.Use(pathlib.Path, os.path.exists)), "<output_path>": schema.Use(pathlib.Path, os.path.exists), schema.Optional("--num_records"): schema.Or(None, schema.Use(int)), "--opportunity_rate": schema.Use(float), "--test_rate": schema.Use(float), "--purchase_rate": schema.Use(float), "--incrementality_rate": schema.Use(float), "--min_ts": schema.Use(int), "--max_ts": schema.Use(int), "--num_conversions": schema.Use(int), "--md5_id": bool, "--help": bool, schema.Optional("--from_header"): schema.Or(None, schema.Use(str)), }) args = args_schema.validate(docopt.docopt(__doc__)) # verify input source arguments if args.get("<input_path>") is None and args.get("--from_header") is None: raise RuntimeError( "Missing input source, please supply either <input_path> or --from_header option" ) if (args.get("<input_path>") is None and args.get("--from_header") is not None and args.get("--num_records") is None): raise RuntimeError( "Missing argument, please specify --num_records with --from_header option" ) _make_input_csv(args)
def load(manifest_path: Path) -> Manifest:
    remote_git_server_schema = {"url": str}
    repo_schema = schema.Use(validate_repo)
    group_schema = {"repos": [str], schema.Optional("includes"): [str]}
    # Note: the gitlab and github_enterprise keys are ignored, and kept here
    # only for backward compatibility reasons
    manifest_schema = schema.Schema(
        {
            "repos": [repo_schema],
            schema.Optional("gitlab"): remote_git_server_schema,
            schema.Optional("github_enterprise"): remote_git_server_schema,
            schema.Optional("groups"): {str: group_schema},
        }
    )
    parsed = tsrc.parse_config(manifest_path, manifest_schema)
    res = Manifest()
    res.apply_config(parsed)
    return res

def validate(self, data):
    """
    Check that data has all the attributes specified, and validate each
    attribute with the schema provided on construction
    """
    for a, s in self.attrs.items():
        s = schema.Schema(s)
        try:
            value = getattr(data, a)
        except AttributeError:
            raise schema.SchemaError("Missing attribute %r" % a, [])
        try:
            new_value = s.validate(value)
        except schema.SchemaError as e:
            raise schema.SchemaError(
                "Invalid value for attribute %r: %s" % (a, e), [])
        setattr(data, a, new_value)
    return data

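# Standalone sketch of the per-attribute pattern implemented above: each
# attribute gets its own schema.Schema, and the converted value is written
# back onto the object. The attribute names and values are illustrative.
import schema
from types import SimpleNamespace

attrs = {"port": schema.Use(int), "host": str}
config = SimpleNamespace(port="8080", host="localhost")
for name, spec in attrs.items():
    setattr(config, name, schema.Schema(spec).validate(getattr(config, name)))
assert config.port == 8080
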
def load(manifest_path):
    gitlab_schema = {"url": str}
    copy_schema = {"src": str, schema.Optional("dest"): str}
    repo_schema = {
        "src": str,
        "url": str,
        schema.Optional("branch"): str,
        schema.Optional("copy"): [copy_schema],
        schema.Optional("fixed_ref"): str,
    }
    manifest_schema = schema.Schema({
        schema.Optional("gitlab"): gitlab_schema,
        "repos": [repo_schema],
    })
    parsed = tsrc.config.parse_config_file(manifest_path, manifest_schema)
    res = Manifest()
    res.load(parsed)
    return res

def valid_register_schema():
    """A schema for the response to the 'register' method."""
    # {
    #     "result": {
    #         "name": "MyAgent",
    #         "display_name": "My Agent",
    #         "description": "This is my first agent",
    #         "default_options": {"option": "value"}
    #     }
    # }
    return schema.Schema({
        "result": {
            "name": str,
            "display_name": str,
            "description": str,
            "default_options": schema.Or({str: object}, {}),
        }
    })

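# Hedged example of a response that valid_register_schema() above accepts;
# an empty "default_options" mapping would match the second schema.Or()
# branch instead.
valid_register_schema().validate({
    "result": {
        "name": "MyAgent",
        "display_name": "My Agent",
        "description": "This is my first agent",
        "default_options": {"option": "value"},
    }
})
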
class Checkpoint(DefaultTree):
    """A 'validatable' component which only accepts a valid *PMF* model data
    checkpoints tree structure.

    Args:
        strict (bool): Optional. Defaults to ``False``. Toggle strict
            validation. When performing strict validation, the configuration
            file and all registered checkpoint checksums will be matched
            against the checksum of the corresponding file on the filesystem.
        verbose (bool): Optional. Defaults to ``False``. Toggle exhaustive
            schema offence reporting.
    """

    __schema__ = schema.Schema({str: Path})  # Does it only have files?

    def __init__(self, strict=False, verbose=False):
        super(Checkpoint, self).__init__(self.__schema__,
                                         verbose=verbose,
                                         strict=strict,
                                         default={})

def main(argv: Optional[List[str]] = None) -> None:
    s = schema.Schema({
        "<logs_file_to_analyze>": schema.Use(Path),
        "--log_path": schema.Or(None, schema.Use(Path)),
        "--out": schema.Or(None, schema.Use(Path)),
        "--verbose": bool,
        "--help": bool,
    })
    arguments = s.validate(docopt(__doc__, argv))
    logs_file = arguments["<logs_file_to_analyze>"]
    log_path = arguments["--log_path"]
    output_json_path = arguments["--out"]

    # if log_path is specified, log with a FileHandler, otherwise with a
    # console StreamHandler
    log_handler = (logging.FileHandler(log_path)
                   if log_path else logging.StreamHandler())
    logging.Formatter.converter = time.gmtime
    logging.basicConfig(
        level=logging.INFO,
        handlers=[log_handler],
        format="%(asctime)sZ %(levelname)s t:%(threadName)s n:%(name)s ! %(message)s",
    )
    logger = logging.getLogger(__name__)
    log_level = logging.DEBUG if arguments["--verbose"] else logging.INFO
    logger.setLevel(log_level)

    # Concatenate all arguments into a string, with every argument wrapped in quotes.
    all_options = f"{sys.argv[1:]}"[1:-1].replace("', '", "' '")
    # E.g. Command line: log_analyzer 'sample_log/intern-output.txt' '--log_path=a.intern.log' ...
    logging.info(f"Command line: {Path(__file__).stem} {all_options}")

    digest = LogDigest(logs_file, logger)
    run_study = digest.analyze_logs()
    logger.info(f"Parsed log line count: {run_study.total_line_num}")

    # pyre-ignore
    summary_json = run_study.to_json(indent=4)
    if output_json_path:
        with open(output_json_path, "w") as outfile:
            outfile.write(summary_json)
    else:
        logger.info(f"Generated run study digest:\n{summary_json}")

    logger.info(f"Done. Instance count: {len(run_study.instances)}")

def remove(self, objective_id, student_id, tutor_id, by_user):
    """Remove an objective from a user's set of adopted objectives.

    :param objective_id: is the id of the `Objective` to be removed.
    :param student_id: is the id of the `User` for whom the `Objective` is
        being removed.
    :param tutor_id: is the id of the `User` who is removing the `Objective`.
    :param by_user: is the `User` who is calling the action.
    """
    remove_schema = {
        'id': s.Use(int),
        'student_id': s.Use(int),
        'tutor_id': s.Use(int),
    }
    s.Schema(remove_schema).validate({
        'id': objective_id,
        'student_id': student_id,
        'tutor_id': tutor_id,
    })
    self._check_user_id_or_admin(tutor_id, by_user)
    UserObjective.ignore_or_delete(student_id, tutor_id, objective_id)

def _validate(cls, thing):
    from_config = getattr(cls, '__pulgas_from_config__', _MISSING)
    if from_config is not _MISSING:
        return from_config(thing)
    config_name_to_name = {
        attribute.metadata[PULGAS_SCHEMA].real_name or attribute.name:
            attribute.name
        for attribute in cls.__attrs_attrs__
    }
    schema = schemalib.Schema({
        _get_schema_key(attribute): attribute.metadata[PULGAS_SCHEMA].schema
        for attribute in cls.__attrs_attrs__
    })
    processed = schema.validate(thing)
    processed_named = {
        config_name_to_name[name]: value
        for name, value in processed.items()
    }
    return cls(**processed_named)

def validate(self, data): # Check the basic properties schema.Schema({ "url": bool, "value": string_schema, }).validate(data) # For url we are done if data["url"]: return data # Check that if non url we have the link in the API if data["value"] not in self.api: raise schema.SchemaError("Link value %r not found in the API " "valid keys are %r" % (data, self.api.keys())) return data