def main():
    """Demo: build models from the skill-tree payload and print generated code.

    Emits attrs-based models for the nested structure, pauses for user input,
    then emits dataclass-based models for the flattened structure.
    """
    payload = load_data()
    started = datetime.now()

    # Numeric and path-like dict keys are treated as dynamic map keys,
    # as is everything under "assets".
    generator = MetadataGenerator(
        dict_keys_regex=[r"^\d+(?:\.\d+)?$", r"^(?:[\w ]+/)+[\w ]+\.[\w ]+$"],
        dict_keys_fields=["assets"],
    )
    registry = ModelRegistry()
    registry.process_meta_data(generator.generate(payload), model_name="SkillTree")
    registry.merge_models(generator=generator)
    registry.generate_names()

    # First pass: nested composition rendered with attrs.
    nested = compose_models(registry.models_map)
    print(generate_code(nested, AttrsModelCodeGenerator))
    print(
        "=" * 10,
        f"{(datetime.now() - started).total_seconds():.4f} seconds",
        "=" * 10,
        "\nPress enter to continue...\n",
    )
    input()

    # Second pass: flat composition rendered with dataclasses (+Meta).
    flattened = compose_models_flat(registry.models_map)
    print(generate_code(
        flattened,
        DataclassModelCodeGenerator,
        class_generator_kwargs={"meta": True},
    ))
def main():
    """Demo: generate pydantic models from Path of Exile stash-tab data."""
    response = stash_tabs()
    dump_response("pathofexile", "tabs", response)
    stashes = response['stashes']

    print(f"Start model generation (data len = {len(stashes)})")
    started = datetime.now()

    generator = MetadataGenerator()
    registry = ModelRegistry()
    registry.process_meta_data(generator.generate(*stashes), model_name="Tab")
    registry.merge_models(generator=generator)
    registry.generate_names()

    # Flat composition → pydantic source code on stdout.
    flattened = compose_models_flat(registry.models_map)
    print(generate_code(flattened, PydanticModelCodeGenerator))
    print(f"{(datetime.now() - started).total_seconds():.4f} seconds")
def main():
    """Demo: generate pydantic models from Ergast F1 API responses.

    All three datasets are fetched and dumped to disk, but only the
    driver-standings data is currently fed into the model registry.
    """
    results_payload = results()
    dump_response("f1", "results", results_payload)
    drivers_payload = drivers()
    dump_response("f1", "drivers", drivers_payload)
    standings_payload = driver_standings()
    dump_response("f1", "driver_standings", standings_payload)

    register_datetime_classes()
    generator = MetadataGenerator()
    registry = ModelRegistry()

    # Only driver standings for now; the other datasets are dumped above
    # but intentionally excluded from model generation.
    for name, payload in (("driver_standings", standings_payload),):
        fields = generator.generate(*payload)
        registry.process_meta_data(fields, model_name=inflection.camelize(name))
    registry.merge_models(generator=generator)
    registry.generate_names()

    for model in registry.models:
        print(pretty_format_meta(model))
        print("=" * 20, end='')

    flattened = compose_models_flat(registry.models_map)
    print(generate_code(flattened, PydanticModelCodeGenerator, class_generator_kwargs={}))
def test_self_validate_pydantic(data, data_type):
    """Round-trip test: generate pydantic models from *data*, exec the
    generated source into a synthetic ``test_models`` module, and validate
    every original item against the generated ``TestModel``.

    :param data: path-like object with an ``open()`` method yielding JSON.
    :param data_type: expected top-level JSON type; anything other than
        ``list`` is wrapped in a single-element list.
    """
    # Local import: the `imp` module (previously used via imp.new_module)
    # was deprecated since Python 3.4 and removed in 3.12.
    import types

    with data.open() as f:
        data = json.load(f)
    gen = MetadataGenerator(
        dict_keys_fields=['files']
    )
    reg = ModelRegistry()
    if data_type is not list:
        data = [data]
    fields = gen.generate(*data)
    reg.process_meta_data(fields, model_name="TestModel")
    reg.merge_models(generator=gen)
    reg.generate_names()
    structure = compose_models_flat(reg.models_map)
    code = generate_code(structure, PydanticModelCodeGenerator)

    # Exec the generated source into a fresh module registered in
    # sys.modules so `import test_models` below resolves to it.
    module = types.ModuleType("test_models")
    sys.modules["test_models"] = module
    try:
        exec(compile(code, "test_models.py", "exec"), module.__dict__)
    except Exception as e:
        # Fail with the generated source attached for debugging.
        assert not e, code
    import test_models

    # Resolve forward references on every generated pydantic model.
    # NOTE(review): update_forward_refs is the pydantic v1 API; v2 renamed
    # it to model_rebuild — confirm which pydantic version the suite pins.
    for name in dir(test_models):
        cls = getattr(test_models, name)
        if isclass(cls) and issubclass(cls, pydantic.BaseModel):
            cls.update_forward_refs()

    # Every source item must validate against the generated root model.
    for item in data:
        obj = test_models.TestModel.parse_obj(item)
        assert obj
def main():
    """Demo: generate attrs models from a Swagger/OpenAPI document.

    The "paths" section is removed up front; the remaining free-form
    mapping sections are treated as dynamic dict keys.
    """
    spec = load_data()
    del spec["paths"]

    generator = MetadataGenerator(
        dict_keys_regex=[],
        dict_keys_fields=[
            "securityDefinitions",
            "paths",
            "responses",
            "definitions",
            "properties",
            "scopes",
        ],
    )
    # Merge models that share >=50% of fields or at least 10 common fields.
    registry = ModelRegistry(ModelFieldsPercentMatch(.5), ModelFieldsNumberMatch(10))
    registry.process_meta_data(generator.generate(spec), model_name="Swagger")
    registry.merge_models(generator=generator)
    registry.generate_names()

    flattened = compose_models_flat(registry.models_map)
    print(generate_code(flattened, AttrsModelCodeGenerator))