def main():
    """Path of Exile example: fetch stash-tab JSON and print generated pydantic models.

    Times the model-generation pipeline (metadata -> registry -> merge -> code)
    and prints the elapsed seconds at the end.
    """
    tabs = stash_tabs()
    # presumably persists the raw API response for offline reuse — TODO confirm
    dump_response("pathofexile", "tabs", tabs)
    tabs = tabs['stashes']
    print(f"Start model generation (data len = {len(tabs)})")
    start_t = datetime.now()
    # register_datetime_classes()
    gen = MetadataGenerator()
    reg = ModelRegistry()
    # One metadata set per stash item; all registered under a single "Tab" root model.
    fields = gen.generate(*tabs)
    reg.process_meta_data(fields, model_name="Tab")
    reg.merge_models(generator=gen)
    reg.generate_names()
    # print("Meta tree:")
    # print(pretty_format_meta(next(iter(reg.models))))
    # print("\n" + "=" * 20, end='')
    structure = compose_models_flat(reg.models_map)
    # print('\n', json_format([structure[0], {str(a): str(b) for a, b in structure[1].items()}]))
    # print("=" * 20)
    print(generate_code(structure, PydanticModelCodeGenerator))
    print(f"{(datetime.now() - start_t).total_seconds():.4f} seconds")
def test_self_validate_pydantic(data, data_type):
    """Self-validation round trip: generate pydantic models from a JSON fixture,
    exec the generated source as a fresh module, and parse the original data
    with the resulting ``TestModel``.

    :param data: path-like fixture with an ``open()`` method pointing at a JSON file
    :param data_type: expected top-level type of the JSON; non-list data is wrapped in a list
    """
    with data.open() as f:
        data = json.load(f)
    gen = MetadataGenerator(
        dict_keys_fields=['files']
    )
    reg = ModelRegistry()
    if data_type is not list:
        data = [data]
    fields = gen.generate(*data)
    reg.process_meta_data(fields, model_name="TestModel")
    reg.merge_models(generator=gen)
    reg.generate_names()
    structure = compose_models_flat(reg.models_map)
    code = generate_code(structure, PydanticModelCodeGenerator)
    # Local import: `types.ModuleType` replaces the deprecated `imp.new_module`
    # (the `imp` module was removed in Python 3.12).
    import types
    module = types.ModuleType("test_models")
    # Register the synthetic module so the `import test_models` below resolves to it.
    sys.modules["test_models"] = module
    try:
        exec(compile(code, "test_models.py", "exec"), module.__dict__)
    except Exception as e:
        # Fail with the generated source in the message instead of a raw traceback.
        assert not e, code
    import test_models
    for name in dir(test_models):
        cls = getattr(test_models, name)
        if isclass(cls) and issubclass(cls, pydantic.BaseModel):
            # Resolve forward references between the generated models (pydantic v1 API).
            cls.update_forward_refs()
    for item in data:
        obj = test_models.TestModel.parse_obj(item)
        assert obj
def main():
    """Open Library example: generate attrs models for search results and book records."""
    SYMBOL = "The Lord of the Rings"
    search_result = search(SYMBOL)
    # presumably persists the raw API response for offline reuse — TODO confirm
    dump_response("openlibrary", "search", search_result)
    search_result = search_result['docs']
    # Fetch the full book record for every search hit that has at least one ISBN.
    books = [
        get_book(item['isbn'][0])
        for item in search_result
        if item.get('isbn', None)
    ]
    dump_response("openlibrary", "book", books[0])
    gen = MetadataGenerator()
    reg = ModelRegistry()
    # Two root models fed through the same generator so shared shapes can merge.
    reg.process_meta_data(gen.generate(*search_result), model_name="Search")
    reg.process_meta_data(gen.generate(*books), model_name="Book")
    reg.merge_models(generator=gen)
    print("\n" + "=" * 20)
    # NOTE(review): sibling examples call reg.generate_names(); confirm this
    # per-model loop is an equivalent (older?) spelling of the same step.
    for model in reg.models:
        model.generate_name()
    structure = compose_models(reg.models_map)
    print(generate_code(structure, AttrsModelCodeGenerator))
def main():
    """Skill-tree example: print nested attrs models, then (after Enter) flat dataclasses."""
    data = load_data()
    start_t = datetime.now()
    gen = MetadataGenerator(
        # presumably treats numeric-looking and path-like keys as dynamic dict
        # keys rather than model fields — TODO confirm against MetadataGenerator docs
        dict_keys_regex=[r"^\d+(?:\.\d+)?$", r"^(?:[\w ]+/)+[\w ]+\.[\w ]+$"],
        dict_keys_fields=["assets"])
    reg = ModelRegistry()
    fields = gen.generate(data)
    reg.process_meta_data(fields, model_name="SkillTree")
    reg.merge_models(generator=gen)
    reg.generate_names()
    # First pass: nested structure rendered as attrs classes.
    structure = compose_models(reg.models_map)
    code = generate_code(structure, AttrsModelCodeGenerator)
    print(code)
    print("=" * 10, f"{(datetime.now() - start_t).total_seconds():.4f} seconds", "=" * 10,
          "\nPress enter to continue...\n")
    input()
    # Second pass: same registry, flat structure rendered as dataclasses with metadata.
    structure_flat = compose_models_flat(reg.models_map)
    code = generate_code(structure_flat, DataclassModelCodeGenerator, class_generator_kwargs={"meta": True})
    print(code)
def main():
    """Fetch three unrelated public-API samples and print generated attrs models."""
    ca_data = chroniclingamerica()
    dump_response("other", "chroniclingamerica", ca_data)
    ll_data = launchlibrary()
    dump_response("other", "launchlibrary", ll_data)
    uni_data = university_domains()
    dump_response("other", "university_domains", uni_data)

    gen = MetadataGenerator()
    reg = ModelRegistry()
    # Register each dataset under its own root model name; the university list
    # is a sequence of records, hence the star-unpacking.
    reg.process_meta_data(gen.generate(ca_data), model_name="CHRONICLING")
    reg.process_meta_data(gen.generate(ll_data), model_name="LaunchLibrary")
    reg.process_meta_data(gen.generate(*uni_data), model_name="Universities")
    reg.merge_models(generator=gen)
    reg.generate_names()
    print(generate_code(compose_models(reg.models_map), AttrsModelCodeGenerator))
def main():
    """F1 API example: generate pydantic models (currently only for driver standings).

    Each dataset is paired with a snake_case name that is camelized into the
    model name; the commented-out loop header enables all three datasets.
    """
    results_data = results()
    dump_response("f1", "results", results_data)
    results_data = ("results", results_data)
    drivers_data = drivers()
    dump_response("f1", "drivers", drivers_data)
    drivers_data = ("driver", drivers_data)
    driver_standings_data = driver_standings()
    dump_response("f1", "driver_standings", driver_standings_data)
    driver_standings_data = ("driver_standings", driver_standings_data)
    # presumably registers datetime string parsers with the generator — TODO confirm
    register_datetime_classes()
    gen = MetadataGenerator()
    reg = ModelRegistry()
    # for name, data in (results_data, drivers_data, driver_standings_data):
    for name, data in (driver_standings_data,):
        fields = gen.generate(*data)
        reg.process_meta_data(fields, model_name=inflection.camelize(name))
    reg.merge_models(generator=gen)
    reg.generate_names()
    for model in reg.models:
        print(pretty_format_meta(model))
    print("=" * 20, end='')
    structure = compose_models_flat(reg.models_map)
    # print('\n', json_format([structure[0], {str(a): str(b) for a, b in structure[1].items()}]))
    # print("=" * 20)
    print(generate_code(structure, PydanticModelCodeGenerator, class_generator_kwargs={}))
def test_string_serializable_registry_order():
    """Registration order controls type detection: a replacement type shadows
    the type it replaces until the registry order is rebuilt."""
    registry = StringSerializableRegistry()
    gen = MetadataGenerator(registry)
    registry.add(cls=IsoTimeString)
    registry.add(cls=IntString)
    # FloatString replaces IntString, so "12" must no longer detect as IntString.
    registry.add(replace_types=(IntString, ), cls=FloatString)
    assert gen._detect_type("12") != IntString
    # Re-adding IsoTimeString last restores IntString's precedence for plain ints.
    registry.remove(IsoTimeString)
    registry.add(cls=IsoTimeString)
    assert gen._detect_type("12") == IntString
    assert gen._detect_type("12:14") == IsoTimeString
def test_convert(models_generator: MetadataGenerator):
    """_convert collapses dicts whose keys match the fixture's dict-key rules
    into DDict metadata, while ordinary scalar fields keep their plain types."""
    source = {
        "dict_field": {},
        "another_dict_field": {"test_dict_field_a": 1, "test_dict_field_b": "a"},
        "another_dict_field_2": {"test_dict_field_a": 1},
        "another_dict_field_3": {"test_dict_field_a": 1, "test_dict_field_b": 2},
        "int_field": 1,
        "not": False,
    }
    expected = {
        "dict_field": DDict(Unknown),
        "another_dict_field": DDict(DUnion(int, StringLiteral({"a"}))),
        "another_dict_field_2": DDict(int),
        "another_dict_field_3": DDict(int),
        "int_field": int,
        "not": bool,
    }
    assert models_generator._convert(source) == expected
def main():
    """Swagger example: generate flat attrs models with loosened merge thresholds."""
    data = load_data()
    # "paths" is removed from the document yet still listed in dict_keys_fields
    # below — presumably a leftover toggle; kept as-is.
    del data["paths"]
    gen = MetadataGenerator(
        dict_keys_regex=[],
        # Map-like Swagger sections whose keys are data, not field names.
        dict_keys_fields=["securityDefinitions", "paths", "responses",
                          "definitions", "properties", "scopes"]
    )
    # Merge models that share 50% of fields or at least 10 common fields.
    reg = ModelRegistry(ModelFieldsPercentMatch(.5), ModelFieldsNumberMatch(10))
    reg.process_meta_data(gen.generate(data), model_name="Swagger")
    reg.merge_models(generator=gen)
    reg.generate_names()
    print(generate_code(compose_models_flat(reg.models_map), AttrsModelCodeGenerator))
def test_merge_field_sets(models_generator: MetadataGenerator, value, expected):
    """Merging field sets must be order-insensitive: the original, reversed and
    shuffled orderings all produce the same result."""
    shuffled = list(value)
    shuffle(shuffled)
    for ordering in (value, reversed(value), shuffled):
        merged = models_generator.merge_field_sets(ordering)
        # Normalize OrderedDict so equality ignores key order.
        if isinstance(merged, OrderedDict):
            merged = dict(merged)
        assert merged == expected
def main():
    """Cities example: full pipeline with progress logging and a final timing line."""
    data = load_data()
    t0 = datetime.now()
    gen = MetadataGenerator()
    reg = ModelRegistry()
    print("Start generating metadata...")
    fields = gen.generate(data)
    print("Start generating models tree...")
    reg.process_meta_data(fields, model_name="Cities")
    print("Merging models...")
    reg.merge_models(generator=gen)
    print("Generating names...")
    reg.generate_names()
    print("Generating structure...")
    structure = compose_models(reg.models_map)
    print("Generating final code...")
    print(generate_code(structure, AttrsModelCodeGenerator))
    print(f"{(datetime.now() - t0).total_seconds():.4f} seconds")
def test_models_names(models_generator: MetadataGenerator, models_registry: ModelRegistry,
                      value: Tuple[str, dict], expected: Set[Tuple[str, bool]]):
    """Check generated model names and their is_name_generated flags.

    The fixture encodes intent in the name's type: a bytes name is passed
    through the normal model_name API (decoded); a str name is assigned
    directly onto the registered model after anonymous registration.
    """
    model_name, data = value
    fields = models_generator.generate(data)
    if isinstance(model_name, bytes):
        models_registry.process_meta_data(fields, model_name=model_name.decode())
    elif isinstance(model_name, str):
        pointer = models_registry.process_meta_data(fields)
        pointer.type.name = model_name
    models_registry.merge_models(models_generator)
    models_registry.generate_names()
    produced = {(model.name, model.is_name_generated) for model in models_registry.models}
    assert produced == expected
def models_generator():
    """Fixture factory: a MetadataGenerator that treats ``test_dict_field_*``
    keys and the ``dict_field`` field as dynamic dict keys (presumably a
    pytest fixture — decorator not visible here)."""
    return MetadataGenerator(
        dict_keys_regex=[r"^test_dict_field_\w+$"],
        dict_keys_fields=["dict_field"],
    )
def test_parse(models_generator: MetadataGenerator, value, expected):
    """Round trip: the detected type parses the raw value, and the parsed
    value's string representation still contains the original input."""
    detected = models_generator._detect_type(value)
    parsed = detected.to_internal_value(value)
    assert parsed == expected
    assert value in parsed.to_representation()
def test_optimize_type(models_generator: MetadataGenerator, value, expected):
    """optimize_type reduces the given metadata to the expected simplified form."""
    assert models_generator.optimize_type(value) == expected
def test_detect_type(models_generator: MetadataGenerator, value, expected):
    """_detect_type maps a raw value to the expected metadata type."""
    detected = models_generator._detect_type(value)
    assert detected == expected
                           ignore_ptr=ignore_ptr)
    # NOTE(review): the line above is the tail of a call whose opening is outside
    # this view; the enclosing branches of _pprint_gen start before this chunk.
    elif isinstance(value, ComplexType):
        # Composite metadata: print the wrapper name, then each member one level deeper.
        yield f"{type(value).__name__}:"
        for t in value.types:
            yield from _pprint_gen(t, lvl=lvl + 1, ignore_ptr=ignore_ptr)
    elif isclass(value) and issubclass(value, StringSerializable):
        yield f"(type=<class '{value.__name__}'>)"
    else:
        # Fallback: show the value's `type` attribute if present, else the value itself.
        yield f"(type={getattr(value, 'type', value)})"


def pretty_format_meta(value, ignore_ptr=False):
    """Render a metadata tree as a single human-readable string.

    :param value: metadata node (model, complex type, or plain type)
    :param ignore_ptr: forwarded to the generator; presumably skips pointer
        indirection nodes — TODO confirm in _pprint_gen
    """
    return "".join(_pprint_gen(value, ignore_ptr=ignore_ptr))


if __name__ == '__main__':
    # Demo: pretty-print the raw model tree, then the merged models.
    gen = MetadataGenerator()
    reg = ModelRegistry()
    fields = gen.generate(*test_data)
    model = reg.process_meta_data(fields)
    print(pretty_format_meta(model))
    print('\n' + '-' * 10, end='')
    reg.merge_models(generator=gen)
    for model in reg.models:
        print(pretty_format_meta(model, ignore_ptr=True))
def test_detect_type(models_generator: MetadataGenerator, value, expected):
    """_detect_type maps a raw value to the expected metadata type."""
    assert models_generator._detect_type(value) == expected