def _edit_equipment_type(
    client: GraphqlClient,
    equipment_type_id: str,
    name: str,
    category: str,
    properties: List[Dict[str, Any]],
    position_definitions: List[Dict[str, Any]],
    port_definitions: List[Dict[str, Any]],
) -> EditEquipmentTypeMutation.EditEquipmentTypeMutationData.EquipmentType:
    return EditEquipmentTypeMutation.execute(
        client,
        EditEquipmentTypeInput(
            id=equipment_type_id,
            name=name,
            category=category,
            positions=[
                from_dict(
                    data_class=EquipmentPositionInput,
                    data=pos,
                    config=Config(strict=True),
                )
                for pos in position_definitions
            ],
            ports=[
                from_dict(
                    data_class=EquipmentPortInput, data=port, config=Config(strict=True)
                )
                for port in port_definitions
            ],
            properties=[
                from_dict(
                    data_class=PropertyTypeInput, data=prop, config=Config(strict=True)
                )
                for prop in properties
            ],
        ),
    ).__dict__[EDIT_EQUIPMENT_TYPE_MUTATION_NAME]

def _add_equipment_type(
    client: SymphonyClient,
    name: str,
    category: Optional[str],
    properties: List[PropertyTypeInput],
    position_definitions: List[Dict[str, str]],
    port_definitions: List[Dict[str, str]],
) -> AddEquipmentTypeMutation.AddEquipmentTypeMutationData.EquipmentType:
    return AddEquipmentTypeMutation.execute(
        client,
        AddEquipmentTypeInput(
            name=name,
            category=category,
            positions=[
                from_dict(
                    data_class=EquipmentPositionInput,
                    data=pos,
                    config=Config(strict=True),
                )
                for pos in position_definitions
            ],
            ports=[
                from_dict(
                    data_class=EquipmentPortInput, data=port, config=Config(strict=True)
                )
                for port in port_definitions
            ],
            properties=properties,
        ),
    ).__dict__[ADD_EQUIPMENT_TYPE_MUTATION_NAME]

def test_get_value_for_missing_value():
    @dataclass
    class X:
        i: int

    config = Config()

    with pytest.raises(ValueNotFoundError):
        config.get_value(field=fields(X)[0], data={})

def test_validate_config_with_wrong_data_key_name():
    @dataclass
    class X:
        i: int

    config = Config(remap={"i": "y"})

    with pytest.raises(InvalidConfigurationError):
        config.validate(data_class=X, data={"i": 1})

def test_get_value_for_field_with_transform():
    @dataclass
    class X:
        i: int

    config = Config(transform={"i": lambda v: v + 1})

    value = config.get_value(field=fields(X)[0], data={"i": 1})

    assert value == 2

def test_get_value_for_field_with_empty_config():
    @dataclass
    class X:
        i: int

    config = Config()

    value = config.get_value(field=fields(X)[0], data={"i": 1})

    assert value == 1

def test_get_value_for_remapped_field():
    @dataclass
    class X:
        i: int

    config = Config(remap={"i": "j"})

    value = config.get_value(field=fields(X)[0], data={"j": 1})

    assert value == 1

def test_get_value_for_field_with_cast():
    @dataclass
    class X:
        i: int

    config = Config(cast=["i"])

    value = config.get_value(field=fields(X)[0], data={"i": "1"})

    assert value == 1

def test_validate_empty_config():
    @dataclass
    class X:
        i: int

    config = Config()

    try:
        config.validate(data_class=X, data={"i": 1})
    except InvalidConfigurationError:
        pytest.fail("empty config should be valid")

def test_validate_config_with_correct_remap():
    @dataclass
    class X:
        i: int

    config = Config(remap={"i": "j"})

    try:
        config.validate(data_class=X, data={"j": 1})
    except InvalidConfigurationError:
        pytest.fail("this config should be valid")

def test_get_value_for_prefixed_field():
    @dataclass
    class X:
        i: int

    @dataclass
    class Y:
        x: X

    config = Config(prefixed={"x": "x_"})

    value = config.get_value(field=fields(Y)[0], data={"x_i": 1})

    assert value == {"i": 1}

def test_get_value_for_flattened_field():
    @dataclass
    class X:
        i: int

    @dataclass
    class Y:
        x: X

    config = Config(flattened=["x"])

    value = config.get_value(field=fields(Y)[0], data={"i": 1})

    assert value == {"i": 1}

def test_make_inner():
    @dataclass
    class X:
        i: int

    @dataclass
    class Y:
        x: X

    config = Config(remap={"x.i": "y"}, check_types=False)

    inner_config = config.make_inner(fields(Y)[0])

    assert inner_config == Config(remap={"i": "y"}, check_types=False)

def test_validate_config_with_wrong_remap_field_name():
    @dataclass
    class X:
        i: int

    config = Config(remap={"x": "y"})

    with pytest.raises(InvalidConfigurationError) as exception_info:
        config.validate(data_class=X, data={"i": 1})

    assert (
        str(exception_info.value)
        == 'invalid value in "remap" configuration: "x". Choices are: i'
    )
    assert exception_info.value.parameter == "remap"
    assert exception_info.value.available_choices == {"i"}
    assert exception_info.value.value == "x"

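
# A minimal end-to-end sketch (not part of the test suite above) combining the
# Config options those tests exercise: "remap" points a field at a differently
# named data key, "cast" coerces the raw value to the field's type, and
# "transform" post-processes the value. Account and its data are hypothetical
# and assume the same from_dict/Config API the tests demonstrate.
@dataclass
class Account:
    user_id: int
    email: str


account = from_dict(
    data_class=Account,
    data={"uid": "42", "email": "USER@EXAMPLE.COM"},
    config=Config(
        remap={"user_id": "uid"},        # read "user_id" from the "uid" key
        cast=["user_id"],                # cast the string "42" to int
        transform={"email": str.lower},  # normalize the address
    ),
)
# expected: account == Account(user_id=42, email="user@example.com")
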
def handle_request(self, route: Route, data: JSON) -> Dict:
    """
    A generic handler for all requests. Parses the request into a Python object
    according to the request_map and executes the corresponding function.
    """
    try:
        route_metadata = request_map[route]
        schema = route_metadata["schema"]
        request_class = route_metadata["request_class"]
    except KeyError:
        raise BadCodingError("Invalid route metadata: " + route)
    try:
        request_data = schema(data)
    except fastjsonschema.JsonSchemaException as e:
        raise InvalidRequest(e.message)
    try:
        request_object = from_dict(
            request_class, request_data, Config(check_types=False)
        )
    except (TypeError, MissingValueError) as e:
        raise BadCodingError("Invalid data to initialize class\n" + str(e))
    reader = injector.get(Reader)
    function = getattr(reader, route)
    return function(request_object)

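
# Sketch of one request_map entry of the shape handle_request reads: a compiled
# fastjsonschema validator under "schema" and the dataclass that the validated
# payload is parsed into under "request_class". The route name and
# GetElementsRequest are illustrative, not taken from the real module.
@dataclass
class GetElementsRequest:  # hypothetical request dataclass
    ids: List[int]


request_map = {
    "get_elements": {
        "schema": fastjsonschema.compile(
            {
                "type": "object",
                "properties": {"ids": {"type": "array", "items": {"type": "integer"}}},
                "required": ["ids"],
            }
        ),
        "request_class": GetElementsRequest,
    }
}
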
def test_custom_from_dict_in_nested_data_class():
    @dataclass
    class X:
        d: date
        t: str

        def from_dict(data_class, data, config):
            data["t"] = "prefix {}".format(data["t"])
            return from_dict(
                data_class=data_class,
                data=data,
                config=Config(type_hooks={date: date.fromtimestamp}),
            )

    @dataclass
    class Y:
        d: date
        x: X

    config = Config(type_hooks={date: date.fromordinal})
    data = {"d": 737790, "x": {"d": 1607511900.985121, "t": "abc"}}

    result = from_dict(Y, data, config=config)

    assert result == Y(
        d=date(2020, 12, 31),
        x=X(
            d=date(2020, 12, 9),
            t="prefix abc",
        ),
    )

def load_endpoints():
    files = (
        os.path.join(settings.QUERIES_DIR, file)
        for file in os.listdir(settings.QUERIES_DIR)
    )
    files = [
        file for file in files if os.path.isfile(file) and file.endswith('.yaml')
    ]
    endpoints = {}
    config = Config(
        forward_references={'Object': Object, 'Select': Select, 'Field': Field},
        type_hooks={TypeEnum: TypeEnum.create},
    )
    for file in files:
        with open(file) as f:
            data = yaml.safe_load(f)
        endpoint = from_dict(data_class=Endpoint, data=data, config=config)
        endpoints[endpoint.name] = endpoint
    validate_selects(endpoints)
    validate_pagination_key(endpoints)
    EndpointStorage.endpoints = endpoints
    return endpoints

def add_service_type(
    client: SymphonyClient,
    name: str,
    hasCustomer: bool,
    properties: List[Tuple[str, str, Optional[PropertyValue], Optional[bool]]],
) -> ServiceType:
    new_property_types = format_properties(properties)
    result = AddServiceTypeMutation.execute(
        client,
        data=ServiceTypeCreateData(
            name=name,
            hasCustomer=hasCustomer,
            properties=[
                from_dict(
                    data_class=PropertyTypeInput, data=p, config=Config(strict=True)
                )
                for p in new_property_types
            ],
        ),
    ).addServiceType
    service_type = ServiceType(
        name=result.name,
        id=result.id,
        hasCustomer=result.hasCustomer,
        propertyTypes=[asdict(p) for p in result.propertyTypes],
    )
    client.serviceTypes[name] = service_type
    return service_type

def bq_row_to_dataclass_instance(self, bq_row: Row) -> T:
    """
    Create a dataclass instance from a row returned by the bq library.
    """
    return from_dict(self._schema, bq_row, config=Config(check_types=False))

def test_from_dict_with_strict_unions_match_and_ambiguous_match():
    @dataclass
    class X:
        i: int

    @dataclass
    class Y:
        i: int

    @dataclass
    class Z:
        u: Union[X, Y]

    data = {
        "u": {"i": 1},
    }

    with pytest.raises(StrictUnionMatchError) as exception_info:
        from_dict(Z, data, Config(strict_unions_match=True))

    assert (
        str(exception_info.value)
        == 'can not choose between possible Union matches for field "u": X, Y'
    )

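
# For contrast, a sketch of the unambiguous case: with strict_unions_match
# enabled, a payload that fits exactly one Union member is resolved without
# raising. A, B and Holder are hypothetical dataclasses; the behaviour is
# inferred from the error tested above, which only fires when dacite cannot
# choose between several matches.
@dataclass
class A:
    i: int


@dataclass
class B:
    s: str


@dataclass
class Holder:
    u: Union[A, B]


result = from_dict(Holder, {"u": {"s": "only B fits"}}, Config(strict_unions_match=True))
# expected: result == Holder(u=B(s="only B fits"))
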
def format_property_definitions(
    properties: List[PropertyDefinition],
) -> List[PropertyTypeInput]:
    property_types = [
        from_dict(
            data_class=PropertyTypeInput,
            data={
                "name": prop.property_name,
                "type": PropertyKind(prop.property_kind),
                "index": i,
                "externalId": prop.external_id,
                "isMandatory": prop.is_mandatory,
                **_get_property_default_value(
                    prop.property_name, prop.property_kind.value, prop.default_value
                ),
                "isInstanceProperty": not prop.is_fixed,
                "isDeleted": prop.is_deleted,
            },
            config=Config(strict=True),
        )
        for i, prop in enumerate(properties)
    ]
    return property_types

def test_from_dict_with_transform_of_missing_optional_field():
    @dataclass
    class X:
        s: Optional[str]

    result = from_dict(X, {}, Config(transform={"s": str.lower}))

    assert result == X(s=None)

def test_from_dict_with_type_hooks():
    @dataclass
    class X:
        s: str

    result = from_dict(X, {"s": "TEST"}, Config(type_hooks={str: str.lower}))

    assert result == X(s="test")

def test_from_dict_with_type_hooks_and_optional():
    @dataclass
    class X:
        s: Optional[str]

    result = from_dict(X, {"s": "TEST"}, Config(type_hooks={str: str.lower}))

    assert result == X(s="test")

def test_from_dict_with_cast_and_generic_collection():
    @dataclass
    class X:
        s: List[int]

    result = from_dict(X, {"s": (1,)}, Config(cast=[List]))

    assert result == X(s=[1])

def test_from_dict_with_cast():
    @dataclass
    class X:
        s: str

    result = from_dict(X, {"s": 1}, Config(cast=[str]))

    assert result == X(s="1")

def test_from_dict_with_type_hooks_and_union():
    @dataclass
    class X:
        s: Union[str, int]

    result = from_dict(X, {"s": "TEST"}, Config(type_hooks={str: str.lower}))

    assert result == X(s="test")

def test_from_dict_with_type_hooks_and_generic_sequence():
    @dataclass
    class X:
        c: List[str]

    result = from_dict(X, {"c": ["TEST"]}, config=Config(type_hooks={str: str.lower}))

    assert result == X(c=["test"])

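
# A small combined sketch (not one of the tests above): type_hooks are keyed by
# the target field type and also reach into Optional, Union and generic
# collections, while cast lists types to coerce incoming values to. Payload is
# a hypothetical dataclass; the behaviour assumed here is the one the tests
# above demonstrate for each option in isolation.
@dataclass
class Payload:
    tags: List[str]
    count: int


payload = from_dict(
    data_class=Payload,
    data={"tags": ["ALPHA", "BETA"], "count": "3"},
    config=Config(type_hooks={str: str.lower}, cast=[int]),
)
# expected: payload == Payload(tags=["alpha", "beta"], count=3)
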
def get_forecast(area_code: int | str, raw: bool = False):
    if type(raw) is not bool:
        raise TypeError(f"raw argument must be bool, not {type(raw).__name__}")
    forecast = _jma_get(f"/forecast/data/forecast/{area_code}.json")[0]
    if raw:
        return forecast
    return from_dict(
        Forecast,
        decamelize(forecast),
        Config({datetime: datetime.fromisoformat}),
    )
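
# Hypothetical usage of get_forecast; 130000 is only an example JMA office code
# (Tokyo) and is not taken from this module.
forecast = get_forecast(130000)            # parsed into a Forecast dataclass
raw_json = get_forecast(130000, raw=True)  # unparsed JSON payload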