def validate_required_primitive_elements_2836(
    cls, values: typing.Dict[str, typing.Any]
) -> typing.Dict[str, typing.Any]:
    """https://www.hl7.org/fhir/extensibility.html#Special-Case
    In some cases, implementers might find that they do not have appropriate data for
    an element with minimum cardinality = 1. In this case, the element must be present,
    but unless the resource or a profile on it has made the actual
    value of the primitive data type mandatory, it is possible to provide an extension
    that explains why the primitive value is not present.

    For each (primitive field, extension field) pair in ``required_fields``:
    if the primitive value is absent or None, the companion ``__ext`` field
    must carry at least one extension explaining the absence; otherwise a
    Missing/NoneIsNotAllowed error is recorded for the primitive field.
    """
    required_fields = [("code", "code__ext")]
    # Sentinel distinguishing "key absent" from an explicit None value.
    _missing = object()

    def _fallback():
        # Used when ext_value's class has no get_resource_type attribute;
        # returning "" makes the comparison below fail harmlessly.
        return ""

    errors: typing.List["ErrorWrapper"] = []
    for name, ext in required_fields:
        field = cls.__fields__[name]
        ext_field = cls.__fields__[ext]
        value = values.get(field.alias, _missing)
        if value not in (_missing, None):
            # Primitive value supplied — nothing to check for this pair.
            continue
        ext_value = values.get(ext_field.alias, _missing)
        missing_ext = True
        if ext_value not in (_missing, None):
            if isinstance(ext_value, dict):
                # Raw dict form: present iff it carries a non-empty extension list.
                missing_ext = len(ext_value.get("extension", [])) == 0
            elif (
                getattr(ext_value.__class__, "get_resource_type", _fallback)()
                == "FHIRPrimitiveExtension"
            ):
                # Already a parsed FHIRPrimitiveExtension instance.
                if ext_value.extension and len(ext_value.extension) > 0:
                    missing_ext = False
            else:
                # Unknown type: run it through the ext field's own validators
                # (note: ext_value is rebound by each validator in turn).
                validate_pass = True
                for validator in ext_field.type_.__get_validators__():
                    try:
                        ext_value = validator(v=ext_value)
                    except ValidationError as exc:
                        errors.append(ErrorWrapper(exc, loc=ext_field.alias))
                        validate_pass = False
                if not validate_pass:
                    continue
                if ext_value.extension and len(ext_value.extension) > 0:
                    missing_ext = False
        if missing_ext:
            if value is _missing:
                errors.append(ErrorWrapper(MissingError(), loc=field.alias))
            else:
                # Key present but explicitly None.
                errors.append(
                    ErrorWrapper(NoneIsNotAllowedError(), loc=field.alias)
                )
    if len(errors) > 0:
        raise ValidationError(errors, cls)  # type: ignore

    return values
def get_missing_field_error(loc: Tuple[str, ...]) -> ErrorWrapper:
    """Build an ErrorWrapper reporting a missing required field at *loc*.

    Newer pydantic releases require an explicit ``config`` argument on
    ErrorWrapper, hence the version branch.
    """
    if not PYDANTIC_1:  # pragma: no cover
        return ErrorWrapper(  # type: ignore
            MissingError(),
            loc=loc,
            config=BaseConfig,
        )
    return ErrorWrapper(MissingError(), loc=loc)
def get_missing_field_error(field_alias: str) -> ErrorWrapper:
    """Build an ErrorWrapper for a required body field that was not supplied.

    The location is always anchored under ``("body", field_alias)``.
    """
    location = ("body", field_alias)
    if not PYDANTIC_1:  # pragma: no cover
        return ErrorWrapper(  # type: ignore
            MissingError(),
            loc=location,
            config=BaseConfig,
        )
    return ErrorWrapper(MissingError(), loc=location)
def aggregate():
    """Run the grouped or global aggregation query and return its rows.

    Uses enclosing-scope names (``objects``, ``q_filters``, ``aggregates``,
    ``aggregation_function``, ``field``, ``group_by``, ``pagination``) —
    presumably a closure inside a view/service function; confirm at call site.

    Raises:
        RequestValidationError: when Postgres reports UNDEFINED_FUNCTION,
            i.e. the aggregate does not exist for the column's type.
    """
    query = objects.filter(q_filters)
    fields = []
    annotations = {
        'value': getattr(aggregates, aggregation_function.value.title())(field.value),
    }
    try:
        if group_by:
            fields += [g.value for g in group_by]
            query = query.values(*fields).annotate(**annotations)
            return list(pagination.query(query).values(*fields, 'value'))
        else:
            query = query.aggregate(**annotations)
            return [query]
    except ProgrammingError as error:
        # BUG FIX: the original used `pgcode in (X)` — parentheses around a
        # single expression are NOT a tuple, so this performed a substring
        # test on the pgcode string. Compare for equality instead.
        if error.__cause__.pgcode == psycopg2_error_codes.UNDEFINED_FUNCTION:
            raise RequestValidationError([
                ErrorWrapper(ProgrammingError(), ("query", "aggregation_function"))
            ]) from error
        raise
def get_by_name_or_raise(
        *, db_session, project_id,
        source_data_format_in=SourceDataFormatRead) -> SourceDataFormatRead:
    """Returns the source specified or raises ValidationError."""
    found = get_by_name(
        db_session=db_session,
        project_id=project_id,
        name=source_data_format_in.name,
    )
    if found:
        return found
    # No match in this project: surface as a pydantic validation error.
    raise ValidationError(
        [
            ErrorWrapper(
                NotFoundError(
                    msg="SourceDataFormat not found.",
                    source=source_data_format_in.name,
                ),
                loc="dataFormat",
            )
        ],
        model=SourceDataFormatRead,
    )
def request_params_to_args(
    required_params: Sequence[ModelField],
    received_params: Union[Mapping[str, Any], QueryParams, Headers],
) -> Tuple[Dict[str, Any], List[ErrorWrapper]]:
    """Extract declared parameters from the incoming request data.

    Returns a ``(values, errors)`` pair: validated values keyed by field
    name, plus any ErrorWrappers produced by missing/invalid inputs.
    """
    values: Dict[str, Any] = {}
    errors: List[ErrorWrapper] = []
    for field in required_params:
        # Sequence-shaped fields must use getlist() on multi-dicts.
        if is_scalar_sequence_field(field) and isinstance(
                received_params, (QueryParams, Headers)):
            raw = received_params.getlist(field.alias) or field.default
        else:
            raw = received_params.get(field.alias)
        info = field.field_info
        assert isinstance(info, params.Param), "Params must be subclasses of Param"
        location = (info.in_.value, field.alias)
        if raw is None:
            if field.required:
                errors.append(ErrorWrapper(MissingError(), loc=location))
            else:
                values[field.name] = deepcopy(field.default)
            continue
        validated, field_errors = field.validate(raw, values, loc=location)
        if isinstance(field_errors, ErrorWrapper):
            errors.append(field_errors)
        elif isinstance(field_errors, list):
            errors.extend(field_errors)
        else:
            values[field.name] = validated
    return values, errors
def update_service(*, db_session: Session = Depends(get_db), service_id: PrimaryKey, service_in: ServiceUpdate):
    """Update an existing service."""
    service = get(db_session=db_session, service_id=service_id)
    if not service:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=[{"msg": "A service with this id does not exist."}],
        )
    try:
        # A unique constraint on the name may reject the update.
        return update(db_session=db_session, service=service, service_in=service_in)
    except IntegrityError:
        raise ValidationError(
            [
                ErrorWrapper(
                    ExistsError(msg="A service with this name already exists."),
                    loc="name",
                )
            ],
            model=ServiceUpdate,
        )
def get_organization_scope_from_slug(slug: str) -> SessionLocal:
    """Iterate all organizations looking for a relevant channel_id."""
    db_session = SessionLocal()
    organization = organization_service.get_by_slug(db_session=db_session, slug=slug)
    db_session.close()
    if not organization:
        raise ValidationError(
            [
                ErrorWrapper(
                    NotFoundError(
                        msg=f"Organization slug '{slug}' not found. Check your spelling."
                    ),
                    loc="organization",
                )
            ],
            model=BaseModel,
        )
    # Route all unqualified table access to the organization's schema.
    schema_engine = engine.execution_options(
        schema_translate_map={None: f"dispatch_organization_{slug}"}
    )
    return sessionmaker(bind=schema_engine)()
def get_by_name_or_raise(
        *, db_session, project_id: int,
        incident_priority_in=IncidentPriorityRead) -> IncidentPriority:
    """Returns the incident_priority specified or raises ValidationError."""
    match = get_by_name(
        db_session=db_session,
        project_id=project_id,
        name=incident_priority_in.name,
    )
    if match:
        return match
    raise ValidationError(
        [
            ErrorWrapper(
                NotFoundError(
                    msg="IncidentPriority not found.",
                    incident_priority=incident_priority_in.name,
                ),
                loc="incident_priority",
            )
        ],
        model=IncidentPriorityRead,
    )
def correct_length_mapping(cls, mapping):
    """Validates that each mapping contains only 2 values (original & changed)
    and at most 1000 values"""
    # if no mapping is provided (i.e. when updating only name), do nothing
    if not mapping:
        return mapping
    bad_indexes = [idx for idx, elem in enumerate(mapping) if len(elem) != 2]
    if bad_indexes:
        raise ValidationError(
            [
                ErrorWrapper(
                    ValueError(
                        f'This mapping contains {len(mapping[idx])} values instead of 2'
                    ),
                    str(idx + 1),
                )
                for idx in bad_indexes
            ],
            cls,
        )
    if len(mapping) > _MAX_MAPPING_CNT:
        raise ValueError(
            f'There is {len(mapping)} mappings, but Husky only supports {_MAX_MAPPING_CNT} mappings.'
        )
    return mapping
async def request_body_to_args(
        required_params: List[Field],
        received_body: Dict[str, Any]) -> Tuple[Dict[str, Any], List[ErrorWrapper]]:
    """Map the received request body onto the declared body fields.

    A single non-embedded body field consumes the whole body; otherwise
    each field is looked up in the body by its alias.
    """
    values: Dict[str, Any] = {}
    errors: List[ErrorWrapper] = []
    if not required_params:
        return values, errors
    first = required_params[0]
    if len(required_params) == 1 and not getattr(first.schema, "embed", None):
        # Whole body belongs to the single field — wrap it under its alias.
        received_body = {first.alias: received_body}
    for field in required_params:
        raw = received_body.get(field.alias)
        if raw is None:
            if field.required:
                errors.append(
                    ErrorWrapper(
                        MissingError(),
                        loc=("body", field.alias),
                        config=BaseConfig,
                    )
                )
            else:
                values[field.name] = deepcopy(field.default)
            continue
        parsed, field_errors = field.validate(raw, values, loc=("body", field.alias))
        if isinstance(field_errors, ErrorWrapper):
            errors.append(field_errors)
        elif isinstance(field_errors, list):
            errors.extend(field_errors)
        else:
            values[field.name] = parsed
    return values, errors
def validate_joins_correct_taxons(cls, values):
    """Check list of fields in joins against all available taxons on model."""
    if 'attributes' not in values or 'joins' not in values:
        return values
    attributes: List[FdqModelAttribute] = values['attributes']
    joins: List[FdqModelJoin] = values['joins']
    # get set of available taxon slugs
    available = set(cls._get_available_attrs_taxon_slugs(attributes))
    # for each join, verify that all its taxons are available in this model
    bad_joins = {}
    for idx, join in enumerate(joins):
        unknown = set(join.taxons) - available
        if unknown:
            bad_joins[idx] = unknown
    if bad_joins:
        # report invalid joins
        raise ValidationError(
            [
                ErrorWrapper(
                    ValueError(
                        f'Join {idx + 1} contains missing fields {",".join(taxon_slugs)}'
                    ),
                    'joins',
                )
                for idx, taxon_slugs in bad_joins.items()
            ],
            cls,
        )
    return values
def __init__(self, **data: Any) -> None:
    """Initializes a BaseModel with the provided data.

    Raises:
        ValidationError: A validation error when the data is invalid.
    """
    try:
        super().__init__(**data)
    except ValidationError as e:
        # Give a special message for faulty list input
        # NOTE: this mutates pydantic v1 internals (ErrorWrapper._loc and
        # the exception's msg_template) in place to enrich the message.
        for re in e.raw_errors:
            if (hasattr(re, "_loc") and hasattr(re.exc, "msg_template")
                    and isinstance(data.get(to_key(re._loc)), list)):
                re.exc.msg_template += (
                    f". The key {re._loc} might be duplicated in the input file."
                )
        # Update error with specific model location name
        identifier = self._get_identifier(data)
        if identifier is None:
            raise e
        else:
            # If there is an identifier, include this in the ValidationError messages.
            raise ValidationError([ErrorWrapper(e, loc=identifier)], self.__class__)
def __init__(__pydantic_self__, **data: Any) -> None:
    """Verify the declared FHIR resource type before normal model init."""
    resource_type = data.pop("resource_type", None)
    errors = []
    # Accept the camelCase FHIR key only when the model does not itself
    # declare a field named "resourceType".
    if ("resourceType" in data
            and "resourceType" not in __pydantic_self__.__fields__):
        resource_type = data.pop("resourceType", None)
    if resource_type is not None:
        expected_resource_type = __pydantic_self__.__fields__[
            "resource_type"].default
        if resource_type != expected_resource_type:
            message = (f"``{__pydantic_self__.__class__.__module__}."
                       f"{__pydantic_self__.__class__.__name__}`` "
                       f"expects resource type ``{expected_resource_type}``, "
                       f"but got ``{resource_type}``. "
                       "Make sure resource type name is correct and right "
                       "ModelClass has been chosen.")
            errors.append(
                ErrorWrapper(WrongResourceType(error=message),
                             loc="resource_type"))
    if errors:
        raise ValidationError(errors, __pydantic_self__.__class__)

    BaseModel.__init__(__pydantic_self__, **data)
async def search(
        search_context: SearchContext = Depends(get_es_search_context)):
    """
    :param search_context:
    :return: BundleType
    """
    request = search_context.engine.request
    try:
        bundle = await fhir_search(
            search_context,
            params=request.query_params.multi_items() or None,
            response_as_dict=True,
        )
        return fhir_rest_response(request, bundle, status_code=200)
    except SearchValidationError as exc:
        validation_exc = RequestValidationError(
            [ErrorWrapper(exc, ("request", ))],
            body="Search Validation Error",
        )
        # some hack
        validation_exc.code = "value"
        validation_exc.system_code = "MSG_BAD_FORMAT"
        raise validation_exc
def get_by_email_and_project_id_or_raise(
        *, db_session, project_id: int,
        individual_contact_in=IndividualContactRead) -> IndividualContactRead:
    """Returns the individual specified or raises ValidationError."""
    individual_contact = get_by_email_and_project(
        db_session=db_session,
        project_id=project_id,
        email=individual_contact_in.email,
    )
    if not individual_contact:
        raise ValidationError(
            [
                ErrorWrapper(
                    NotFoundError(
                        # Fixed typo in user-facing message: "Indivdual" -> "Individual".
                        msg="Individual not found.",
                        individual=individual_contact_in.email,
                    ),
                    loc="individual",
                )
            ],
            model=IndividualContactRead,
        )
    return individual_contact
def get_by_name_or_raise(
        *, db_session, project_id,
        source_transport_in=SourceTransportRead) -> SourceTransportRead:
    """Returns the source transport specified or raises ValidationError."""
    transport = get_by_name(
        db_session=db_session,
        project_id=project_id,
        name=source_transport_in.name,
    )
    if transport:
        return transport
    raise ValidationError(
        [
            ErrorWrapper(
                NotFoundError(
                    msg="SourceTransport not found.",
                    source=source_transport_in.name,
                ),
                loc="source",
            )
        ],
        model=SourceTransportRead,
    )
def create_organization(
    *,
    db_session: Session = Depends(get_db),
    organization_in: OrganizationCreate,
    current_user: DispatchUser = Depends(get_current_user),
):
    """Create a new organization."""
    try:
        organization = create(db_session=db_session, organization_in=organization_in)
    except IntegrityError:
        # Organization names are unique; translate the DB error.
        raise ValidationError(
            [
                ErrorWrapper(
                    ExistsError(msg="An organization with this name already exists."),
                    loc="name",
                )
            ],
            model=OrganizationCreate,
        )
    # add creator as organization owner
    add_user(
        db_session=db_session,
        organization=organization,
        user=current_user,
        role=UserRoles.owner,
    )
    return organization
class MockException:
    # Test fixture mimicking an exception object that carries pydantic
    # validation failures: ``raw_errors`` holds a ValidationError wrapping
    # an AnyStrMaxLengthError for the ('name',) location (limit 10).
    raw_errors = [
        ValidationError(
            errors=[ErrorWrapper(AnyStrMaxLengthError(limit_value=10), ('name',))],
            model=MockModel
        )
    ]
def update_tag_type(*, db_session: Session = Depends(get_db), tag_type_id: PrimaryKey, tag_type_in: TagTypeUpdate):
    """Update a tag type."""
    tag_type = get(db_session=db_session, tag_type_id=tag_type_id)
    if not tag_type:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=[{"msg": "A tag type with this id does not exist."}],
        )
    try:
        # Name uniqueness is enforced at the DB level.
        return update(db_session=db_session, tag_type=tag_type, tag_type_in=tag_type_in)
    except IntegrityError:
        raise ValidationError(
            [
                ErrorWrapper(
                    ExistsError(msg="A tag type with this name already exists."),
                    loc="name",
                )
            ],
            model=TagTypeUpdate,
        )
def _validate_feature_server_config(cls, values):
    """Fill in and validate the ``feature_server`` section from the provider."""
    # Having no feature server is the default.
    if "feature_server" not in values:
        return values
    # Skip if we aren't creating the configuration from a dict
    if not isinstance(values["feature_server"], Dict):
        return values
    # Make sure that the provider configuration is set. We need it to set the defaults
    if "provider" not in values:
        raise FeastProviderNotSetError()
    expected_type = FEATURE_SERVER_TYPE_FOR_PROVIDER.get(values["provider"])
    declared_type = values["feature_server"].get("type")
    # The type is provider-defined: it may be omitted, or must match exactly.
    if declared_type not in (None, expected_type):
        raise FeastFeatureServerTypeSetError(declared_type)
    values["feature_server"]["type"] = expected_type
    # Validate the dict to ensure one of the union types match
    try:
        config_cls = get_feature_server_config_from_type(expected_type)
        config_cls(**values["feature_server"])
    except ValidationError as e:
        raise ValidationError(
            [ErrorWrapper(e, loc="feature_server")],
            model=RepoConfig,
        )
    return values
def update_organization(
    *,
    db_session: Session = Depends(get_db),
    organization_id: PrimaryKey,
    organization_in: OrganizationUpdate,
):
    """Update an organization."""
    organization = get(db_session=db_session, organization_id=organization_id)
    if not organization:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=[{"msg": "An organization with this id does not exist."}],
        )
    try:
        return update(
            db_session=db_session,
            organization=organization,
            organization_in=organization_in,
        )
    except IntegrityError:
        raise ValidationError(
            [
                ErrorWrapper(
                    ExistsError(msg="An organization with this name already exists."),
                    loc="name",
                )
            ],
            model=OrganizationUpdate,
        )
def create_service(
    *,
    db_session: Session = Depends(get_db),
    service_in: ServiceCreate = Body(
        ...,
        example={
            "name": "myService",
            "type": "pagerduty",
            "is_active": True,
            "external_id": "234234",
        },
    ),
):
    """Create a new service."""
    existing = get_by_external_id_and_project_name(
        db_session=db_session,
        external_id=service_in.external_id,
        project_name=service_in.project.name,
    )
    if existing:
        # external_id must be unique within a project.
        raise ValidationError(
            [
                ErrorWrapper(
                    ExistsError(msg="A service with this external_id already exists."),
                    loc="external_id",
                )
            ],
            model=ServiceCreate,
        )
    return create(db_session=db_session, service_in=service_in)
def get_by_name_or_raise(
        *, db_session, project_id,
        source_status_in=SourceStatusRead) -> SourceStatusRead:
    """Returns the status specified or raises ValidationError."""
    found = get_by_name(
        db_session=db_session,
        project_id=project_id,
        name=source_status_in.name,
    )
    if found:
        return found
    raise ValidationError(
        [
            ErrorWrapper(
                NotFoundError(
                    msg="SourceStatus not found.",
                    status=source_status_in.name,
                ),
                loc="status",
            )
        ],
        model=SourceStatusRead,
    )
def _validate_offline_store_config(cls, values):
    """Default and validate the ``offline_store`` section of the repo config.

    Ensures the section exists, infers its ``type`` from the provider when
    absent, and validates the dict against the matching config class.

    Raises:
        FeastProviderNotSetError: if no provider is configured.
        ValidationError: wrapping the underlying config-class error, located
            at ``offline_store``.
    """
    # Set empty offline_store config if it isn't set explicitly
    if "offline_store" not in values:
        values["offline_store"] = dict()
    # Skip if we aren't creating the configuration from a dict
    if not isinstance(values["offline_store"], Dict):
        return values
    # FIX: was a bare `assert`, which is stripped under `python -O`.
    # Raise the same error the sibling feature-server validator uses.
    if "provider" not in values:
        raise FeastProviderNotSetError()
    # Set the default type based on the provider.
    if "type" not in values["offline_store"]:
        provider_defaults = {"local": "file", "gcp": "bigquery", "aws": "redshift"}
        provider = values["provider"]
        if provider in provider_defaults:
            values["offline_store"]["type"] = provider_defaults[provider]
    offline_store_type = values["offline_store"]["type"]
    # Validate the dict to ensure one of the union types match
    try:
        offline_config_class = get_offline_config_from_type(offline_store_type)
        offline_config_class(**values["offline_store"])
    except ValidationError as e:
        raise ValidationError(
            [ErrorWrapper(e, loc="offline_store")],
            model=RepoConfig,
        )
    return values
def create_vps_server(db: Session = Depends(get_db), *, vps_profile: VpsCreateSchema):
    """Validate the ISP, persist a base VPS record, and enqueue async creation."""
    # validate
    if not crud_isp.get(db_session=db, id=vps_profile.isp_id):
        raise ValidationError(
            [ErrorWrapper(Exception('provider_name is not matched'), loc="isp_id")],
            model=VpsCreateSchema,
        )
    # create base vps data
    spec = RedisPool().get_vps_spec_value(
        db_session=db,
        isp_id=vps_profile.isp_id,
        os_code=vps_profile.os_code,
        plan_code=vps_profile.plan_code,
        region_code=vps_profile.region_code,
    )
    vps_obj = crud_vps.create(
        db_session=db,
        obj_in=dict(
            hostname=vps_profile.hostname,
            isp_id=vps_profile.isp_id,
            ssh_keys=vps_profile.ssh_keys,
            remark=vps_profile.remark,
            status=vps_profile.status,
            **spec,
        ),
        serializer=None,
    )
    # Provisioning itself happens asynchronously in the worker.
    task = celery_app.send_task("create_vps", args=[vps_profile.dict(), vps_obj.id])
    return dict(result=task)
def validate(cls: Type["Range"], value: Any) -> "Range":
    """Validate the passed in value.

    Strings are parsed against RANGE_REGEX into a unit plus one or more
    Subranges; any other value is delegated to the parent validator.

    Raises:
        ValidationError: if a string value does not match the Range grammar.
    """
    if isinstance(value, str):
        match = RANGE_REGEX.fullmatch(value)
        if not match:
            raise ValidationError(
                [
                    # FIX: was an f-string with no placeholders (F541);
                    # the message text is unchanged.
                    ErrorWrapper(ValueError("Unable to parse Range!"), loc=cls.__name__)
                ],
                cls,
            )
        match_groups = match.groupdict()
        return cls(
            unit=match_groups["unit"],
            ranges=[
                Subrange(**{
                    # Empty captures mean an open-ended bound -> None.
                    k: int(v) if v else None
                    for k, v in m.groupdict().items()
                })
                for m in SUBRANGE_REGEX.finditer(match_groups["ranges"])
            ],
        )
    return super().validate(value)
def get_class_by_tablename(table_fullname: str) -> Any:
    """Return class reference mapped to table."""

    def _lookup(name):
        # Scan the declarative registry for a class whose table matches.
        for candidate in Base._decl_class_registry.values():
            if hasattr(candidate, "__table__"):
                if candidate.__table__.fullname.lower() == name.lower():
                    return candidate
        return None

    mapped_name = resolve_table_name(table_fullname)
    # try looking in the 'dispatch_core' schema as a fallback
    mapped_class = _lookup(mapped_name) or _lookup(f"dispatch_core.{mapped_name}")
    if not mapped_class:
        raise ValidationError(
            [
                ErrorWrapper(
                    NotFoundError(msg="Model not found. Check the name of your model."),
                    loc="filter",
                )
            ],
            model=BaseModel,
        )
    return mapped_class
def request_params_to_args(
    required_params: Sequence[Field],
    received_params: Union[Mapping[str, Any], QueryParams, Headers],
) -> Tuple[Dict[str, Any], List[ErrorWrapper]]:
    """Extract and validate declared request parameters.

    Returns ``(values, errors)``: validated values keyed by field name and
    the ErrorWrappers collected for missing or invalid inputs.
    """
    values: Dict[str, Any] = {}
    errors: List[ErrorWrapper] = []
    for field in required_params:
        # Multi-valued fields need getlist() on QueryParams/Headers.
        if field.shape in sequence_shapes and isinstance(
                received_params, (QueryParams, Headers)):
            raw = received_params.getlist(field.alias)
        else:
            raw = received_params.get(field.alias)
        schema: params.Param = field.schema
        assert isinstance(schema, params.Param), "Params must be subclasses of Param"
        location = (schema.in_.value, field.alias)
        if raw is None:
            if field.required:
                errors.append(
                    ErrorWrapper(MissingError(), loc=location, config=BaseConfig)
                )
            else:
                values[field.name] = deepcopy(field.default)
            continue
        validated, field_errors = field.validate(raw, values, loc=location)
        if isinstance(field_errors, ErrorWrapper):
            errors.append(field_errors)
        elif isinstance(field_errors, list):
            errors.extend(field_errors)
        else:
            values[field.name] = validated
    return values, errors
def _validate_feature_server_config(cls, values):
    """Default and validate the ``feature_server`` section (AWS Lambda only)."""
    # Having no feature server is the default.
    if "feature_server" not in values:
        return values
    # Skip if we aren't creating the configuration from a dict
    if not isinstance(values["feature_server"], Dict):
        return values
    # Make sure that the provider configuration is set. We need it to set the defaults
    if "provider" not in values:
        raise FeastProviderNotSetError()
    # Make sure that the type is not set, since we will set it based on the provider.
    if "type" in values["feature_server"]:
        raise FeastFeatureServerTypeSetError(values["feature_server"]["type"])
    # Set the default type. We only support AWS Lambda for now.
    if values["provider"] == "aws":
        values["feature_server"]["type"] = "aws_lambda"
    server_type = values["feature_server"]["type"]
    # Validate the dict to ensure one of the union types match
    try:
        config_cls = get_feature_server_config_from_type(server_type)
        config_cls(**values["feature_server"])
    except ValidationError as e:
        raise ValidationError(
            [ErrorWrapper(e, loc="feature_server")],
            model=RepoConfig,
        )
    return values