class QueueSchema(BaseSchema):
    """Serialization/validation schema for call-center queues.

    Load-side renames (via ``attribute=``): ``data_quality`` maps to the
    model's ``data_quality_bool`` and ``ignore_forward`` to
    ``ignore_forward_bool``.
    """

    id = fields.Integer(dump_only=True)
    tenant_uuid = fields.String(dump_only=True)
    # Queue name: constrained by NAME_REGEX, must not be the reserved word
    # 'general', max 128 characters.
    name = fields.String(
        validate=(Regexp(NAME_REGEX), NoneOf(['general']), Length(max=128)),
        required=True,
    )
    label = fields.String(validate=Length(max=128), missing=None)
    data_quality = StrictBoolean(attribute='data_quality_bool')
    # DTMF feature toggles for callee/caller.
    dtmf_hangup_callee_enabled = StrictBoolean()
    dtmf_hangup_caller_enabled = StrictBoolean()
    dtmf_transfer_callee_enabled = StrictBoolean()
    dtmf_transfer_caller_enabled = StrictBoolean()
    dtmf_record_callee_enabled = StrictBoolean()
    dtmf_record_caller_enabled = StrictBoolean()
    retry_on_timeout = StrictBoolean()
    ring_on_hold = StrictBoolean()
    timeout = fields.Integer(validate=Range(min=0), allow_none=True)
    announce_hold_time_on_entry = StrictBoolean()
    ignore_forward = StrictBoolean(attribute='ignore_forward_bool')
    preprocess_subroutine = fields.String(validate=Length(max=39), allow_none=True)
    music_on_hold = fields.String(validate=Length(max=128), allow_none=True)
    # Threshold/destination pairs; the destination dicts are converted to
    # Dialaction objects in create_objects() below.
    wait_time_threshold = fields.Integer(validate=Range(min=0), allow_none=True)
    wait_time_destination = DestinationField(allow_none=True)
    wait_ratio_threshold = fields.Float(validate=Range(min=0), allow_none=True)
    wait_ratio_destination = DestinationField(allow_none=True)
    caller_id_mode = fields.String(validate=OneOf(
        ['prepend', 'overwrite', 'append']), allow_none=True)
    caller_id_name = fields.String(validate=Length(max=80), allow_none=True)
    enabled = StrictBoolean()
    # Raw queue options: a list of [key, value] string pairs.
    options = fields.List(
        fields.List(fields.String(), validate=Length(equal=2)))
    links = ListLink(Link('queues'))
    extensions = fields.Nested(
        'ExtensionSchema',
        only=['id', 'exten', 'context', 'links'],
        many=True,
        dump_only=True,
    )
    schedules = fields.Nested(
        'ScheduleSchema', only=['id', 'name', 'links'], many=True, dump_only=True)
    # Dumped separately here, then regrouped under 'members' by wrap_members().
    agent_queue_members = fields.Nested(
        'QueueAgentQueueMembersSchema', many=True, dump_only=True)
    user_queue_members = fields.Nested(
        'QueueUserQueueMembersSchema', many=True, dump_only=True)

    @post_load
    def create_objects(self, data):
        """Replace raw destination dicts with Dialaction instances."""
        for key in ('wait_time_destination', 'wait_ratio_destination'):
            if data.get(key):
                data[key] = Dialaction(**data[key])
        return data

    @post_dump
    def wrap_members(self, data):
        """Group the dumped agent/user member lists under a 'members' key.

        Skipped when a field subset was requested that excludes 'members'.
        """
        if not self.only or 'members' in self.only:
            data['members'] = {
                'agents': data.pop('agent_queue_members', []),
                'users': data.pop('user_queue_members', []),
            }
        return data
class AreaSchema(Schema):
    """Validation/serialization schema for an Area record."""

    # NOTE(review): `required=True` on a dump_only field never triggers on
    # load (dump_only fields are excluded from deserialization) — confirm
    # whether it is intentional.
    id = fields.Integer(dump_only=True, required=True, validate=Range(min=1))
    name = fields.String(required=True, validate=Length(min=1))
    country_code = fields.String(required=True, validate=Length(min=1))
    # Fix: the load default was the int 1 on a Boolean field; use the
    # boolean literal (equal under ==, but type-correct for consumers).
    active = fields.Boolean(missing=True)
    country = fields.Nested('CountrySchema')
class ChartDataBoxplotOptionsSchema(ChartDataPostProcessingOperationOptionsSchema):
    """
    Boxplot operation config.
    """

    # Columns to group by before computing box statistics.
    groupby = fields.List(
        fields.String(description="Columns by which to group the query.",),
        allow_none=True,
    )
    metrics = fields.List(
        fields.Raw(),
        description="Aggregate expressions. Metrics can be passed as both "
        "references to datasource metrics (strings), or ad-hoc metrics"
        "which are defined only within the query object. See "
        "`ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics.",
    )
    # Whisker computation strategy, restricted to the supported enum values.
    whisker_type = fields.String(
        description="Whisker type. Any numpy function will work.",
        validate=validate.OneOf(
            choices=([val.value for val in PostProcessingBoxplotWhiskerType])
        ),
        required=True,
        example="tukey",
    )
    # (lower, upper) pair used only for the "percentile" whisker type; both
    # bounds are strictly inside (0, 100).
    percentiles = fields.Tuple(
        (
            fields.Float(
                description="Lower percentile",
                validate=[
                    Range(
                        min=0,
                        max=100,
                        min_inclusive=False,
                        max_inclusive=False,
                        error=_(
                            "lower percentile must be greater than 0 and less "
                            "than 100. Must be lower than upper percentile."
                        ),
                    ),
                ],
            ),
            fields.Float(
                description="Upper percentile",
                validate=[
                    Range(
                        min=0,
                        max=100,
                        min_inclusive=False,
                        max_inclusive=False,
                        error=_(
                            "upper percentile must be greater than 0 and less "
                            "than 100. Must be higher than lower percentile."
                        ),
                    ),
                ],
            ),
        ),
        description="Upper and lower percentiles for percentile whisker type.",
        example=[1, 99],
    )
class SessionScoresListSchema(Schema):
    """Query parameters for paginating a session-scores listing."""

    # Page size: 1..100, defaulting to 20 when absent.
    limit = fields.Integer(validate=Range(min=1, max=100), missing=20)
    # Number of records to skip; never negative, defaults to 0.
    offset = fields.Integer(validate=Range(min=0), missing=0)
def __init__(self) -> None:
    """Initialize the field with a fixed element type and length bounds.

    NOTE(review): the enclosing class is not visible here. Judging from the
    arguments, this configures a list-like field of 3 to 4 non-negative
    integers (each capped at ``sys.maxsize``) — confirm against the parent
    class's ``__init__`` signature.
    """
    super().__init__(
        Integer(validate=Range(min=0, max=sys.maxsize)),
        required=True,
        validate=Length(min=3, max=4),
    )
class TestSchema(Schema):
    """Schema exercising exclusive and inclusive Range bounds."""

    # Both bounds exclusive: for integers only the value 2 passes.
    foo = fields.Integer(
        validate=Range(min=1, max=3, min_inclusive=False, max_inclusive=False)
    )
    # Inclusive bounds: 2, 3 and 4 pass.
    bar = fields.Integer(validate=Range(min=2, max=4))
class PostItemReview(Schema):
    """Payload schema for posting a review of an item."""

    # Review body: non-empty, at most 1024 characters.
    text = fields.String(required=True, validate=Length(min=1, max=1024))
    # Rating on a 1..10 scale.
    grade = fields.Integer(required=True, validate=Range(min=1, max=10))
RING = 6 # Reminds me of Beatstream ? @dataclass class Metadata: cover: Optional[str] # path to album art ? creator: Optional[str] # Chart author background: Optional[str] # path to background image version: Optional[str] # freeform difficulty name id: Optional[int] mode: int time: Optional[int] # creation timestamp ? song: SongInfo PositiveInt = NewType("PositiveInt", int, validate=Range(min=0)) BeatTime = Tuple[PositiveInt, PositiveInt, PositiveInt] StrictlyPositiveDecimal = NewType("StrictlyPositiveDecimal", Decimal, validate=Range(min=0, min_inclusive=False)) @dataclass class BPMEvent: beat: BeatTime bpm: StrictlyPositiveDecimal ButtonIndex = NewType("ButtonIndex", int, validate=Range(min=0, max=15))
class ChartDataQueryObjectSchema(Schema):
    """Schema describing a single `query_object` of the chart data API."""

    filters = fields.List(fields.Nested(ChartDataFilterSchema), required=False)
    granularity = fields.String(
        description="Name of temporal column used for time filtering. For legacy Druid "
        "datasources this defines the time grain.",
    )
    granularity_sqla = fields.String(
        description="Name of temporal column used for time filtering for SQL "
        "datasources. This field is deprecated, use `granularity` "
        "instead.",
        deprecated=True,
    )
    groupby = fields.List(
        fields.String(description="Columns by which to group the query.", ),
    )
    metrics = fields.List(
        fields.Raw(),
        description="Aggregate expressions. Metrics can be passed as both "
        "references to datasource metrics (strings), or ad-hoc metrics"
        "which are defined only within the query object. See "
        "`ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics.",
    )
    post_processing = fields.List(
        fields.Nested(ChartDataPostProcessingOperationSchema),
        description="Post processing operations to be applied to the result set. "
        "Operations are applied to the result set in sequential order.",
    )
    time_range = fields.String(
        # Fix: description typo "A time rage" -> "A time range".
        description="A time range, either expressed as a colon separated string "
        "`since : until` or human readable freeform. Valid formats for "
        "`since` and `until` are: \n"
        "- ISO 8601\n"
        "- X days/years/hours/day/year/weeks\n"
        "- X days/years/hours/day/year/weeks ago\n"
        "- X days/years/hours/day/year/weeks from now\n"
        "\n"
        "Additionally, the following freeform can be used:\n"
        "\n"
        "- Last day\n"
        "- Last week\n"
        "- Last month\n"
        "- Last quarter\n"
        "- Last year\n"
        "- No filter\n"
        "- Last X seconds/minutes/hours/days/weeks/months/years\n"
        "- Next X seconds/minutes/hours/days/weeks/months/years\n",
        example="Last week",
    )
    time_shift = fields.String(
        # Fix: "parsdatetime" -> "parsedatetime" (the library's actual name,
        # matching the URL on the same line).
        description="A human-readable date/time string. "
        "Please refer to [parsedatetime](https://github.com/bear/parsedatetime) "
        "documentation for details on valid values.",
    )
    is_timeseries = fields.Boolean(
        description="Is the `query_object` a timeseries.", required=False)
    timeseries_limit = fields.Integer(
        description="Maximum row count for timeseries queries. Default: `0`",
    )
    row_limit = fields.Integer(
        description='Maximum row count. Default: `config["ROW_LIMIT"]`',
        validate=[
            Range(min=1, error=_("`row_limit` must be greater than or equal to 1"))
        ],
    )
    row_offset = fields.Integer(
        description="Number of rows to skip. Default: `0`",
        validate=[
            Range(min=0, error=_("`row_offset` must be greater than or equal to 0"))
        ],
    )
    order_desc = fields.Boolean(
        description="Reverse order. Default: `false`", required=False)
    extras = fields.Nested(ChartDataExtrasSchema, required=False)
    columns = fields.List(
        fields.String(),
        description="",
    )
    orderby = fields.List(
        fields.List(fields.Raw()),
        # Fix: description previously trailed off mid-sentence
        # ("is a boolean ") — completed the sentence.
        description="Expects a list of lists where the first element is the column "
        "name which to sort by, and the second element is a boolean.",
        example=[["my_col_1", False], ["my_col_2", True]],
    )
    where = fields.String(
        description="WHERE clause to be added to queries using AND operator."
        "This field is deprecated and should be passed to `extras`.",
        deprecated=True,
    )
    having = fields.String(
        description="HAVING clause to be added to aggregate queries using "
        "AND operator. This field is deprecated and should be passed "
        "to `extras`.",
        deprecated=True,
    )
    having_filters = fields.List(
        fields.Dict(),
        description="HAVING filters to be added to legacy Druid datasource queries. "
        "This field is deprecated and should be passed to `extras` "
        "as `filters_druid`.",
        deprecated=True,
    )
class ReportSchedulePutSchema(Schema):
    """Schema for partial updates (PUT) of a report schedule.

    All fields are optional so callers may update any subset.
    NOTE(review): several fields use ``default=`` (a dump-side default in
    marshmallow 3) where a load-side ``missing=`` may have been intended —
    confirm against the API's expected behavior.
    """

    type = fields.String(
        description=type_description,
        required=False,
        validate=validate.OneOf(choices=tuple(key.value for key in ReportScheduleType)),
    )
    name = fields.String(
        description=name_description, required=False, validate=[Length(1, 150)])
    description = fields.String(
        description=description_description,
        allow_none=True,
        required=False,
        example="Daily sales dashboard to marketing",
    )
    context_markdown = fields.String(
        description=context_markdown_description, allow_none=True, required=False)
    active = fields.Boolean(required=False)
    # Schedule expression, validated both syntactically (Length) and
    # semantically (validate_crontab).
    crontab = fields.String(
        description=crontab_description,
        validate=[validate_crontab, Length(1, 1000)],
        required=False,
    )
    timezone = fields.String(
        description=timezone_description,
        default="UTC",
        validate=validate.OneOf(choices=tuple(all_timezones)),
    )
    sql = fields.String(
        description=sql_description,
        example="SELECT value FROM time_series_table",
        required=False,
        allow_none=True,
    )
    chart = fields.Integer(required=False, allow_none=True)
    creation_method = EnumField(
        ReportCreationMethod,
        by_value=True,
        allow_none=True,
        description=creation_method_description,
    )
    dashboard = fields.Integer(required=False, allow_none=True)
    database = fields.Integer(required=False)
    owners = fields.List(
        fields.Integer(description=owners_description), required=False)
    validator_type = fields.String(
        description=validator_type_description,
        validate=validate.OneOf(choices=tuple(
            key.value for key in ReportScheduleValidatorType)),
        allow_none=True,
        required=False,
    )
    validator_config_json = fields.Nested(ValidatorConfigJSONSchema, required=False)
    # Retention/timeout values are all strictly positive integers.
    log_retention = fields.Integer(
        description=log_retention_description,
        example=90,
        required=False,
        validate=[Range(min=1, error=_("Value must be greater than 0"))],
    )
    grace_period = fields.Integer(
        description=grace_period_description,
        example=60 * 60 * 4,
        required=False,
        validate=[Range(min=1, error=_("Value must be greater than 0"))],
    )
    working_timeout = fields.Integer(
        description=working_timeout_description,
        example=60 * 60 * 1,
        allow_none=True,
        required=False,
        validate=[Range(min=1, error=_("Value must be greater than 0"))],
    )
    recipients = fields.List(fields.Nested(ReportRecipientSchema), required=False)
    report_format = fields.String(
        default=ReportDataFormat.VISUALIZATION,
        validate=validate.OneOf(choices=tuple(key.value for key in ReportDataFormat)),
    )
    force_screenshot = fields.Boolean(default=False)
def __init__(self) -> None:
    """Initialize the field as required with values constrained to [0, 4096].

    NOTE(review): the enclosing class is not visible here — presumably an
    integer-valued field subclass; confirm against the parent class.
    """
    super().__init__(required=True, validate=Range(min=0, max=4096))
class ReportSchedulePostSchema(Schema):
    """Schema for creating (POST) a report schedule.

    ``type``, ``name`` and ``crontab`` are mandatory; a schema-level
    validator additionally forbids a database reference on REPORT-type
    schedules (see validate_report_references below).
    """

    type = fields.String(
        description=type_description,
        allow_none=False,
        required=True,
        validate=validate.OneOf(choices=tuple(key.value for key in ReportScheduleType)),
    )
    name = fields.String(
        description=name_description,
        allow_none=False,
        required=True,
        validate=[Length(1, 150)],
        example="Daily dashboard email",
    )
    description = fields.String(
        description=description_description,
        allow_none=True,
        required=False,
        example="Daily sales dashboard to marketing",
    )
    context_markdown = fields.String(
        description=context_markdown_description, allow_none=True, required=False)
    active = fields.Boolean()
    crontab = fields.String(
        description=crontab_description,
        validate=[validate_crontab, Length(1, 1000)],
        example="*/5 * * * *",
        allow_none=False,
        required=True,
    )
    timezone = fields.String(
        description=timezone_description,
        default="UTC",
        validate=validate.OneOf(choices=tuple(all_timezones)),
    )
    sql = fields.String(
        description=sql_description, example="SELECT value FROM time_series_table")
    chart = fields.Integer(required=False, allow_none=True)
    creation_method = EnumField(
        ReportCreationMethod,
        by_value=True,
        required=False,
        description=creation_method_description,
    )
    dashboard = fields.Integer(required=False, allow_none=True)
    selected_tabs = fields.List(fields.Integer(), required=False, allow_none=True)
    database = fields.Integer(required=False)
    owners = fields.List(fields.Integer(description=owners_description))
    validator_type = fields.String(
        description=validator_type_description,
        validate=validate.OneOf(choices=tuple(
            key.value for key in ReportScheduleValidatorType)),
    )
    validator_config_json = fields.Nested(ValidatorConfigJSONSchema)
    # Retention/timeout values are all strictly positive integers.
    log_retention = fields.Integer(
        description=log_retention_description,
        example=90,
        validate=[Range(min=1, error=_("Value must be greater than 0"))],
    )
    grace_period = fields.Integer(
        description=grace_period_description,
        example=60 * 60 * 4,
        default=60 * 60 * 4,
        validate=[Range(min=1, error=_("Value must be greater than 0"))],
    )
    working_timeout = fields.Integer(
        description=working_timeout_description,
        example=60 * 60 * 1,
        default=60 * 60 * 1,
        validate=[Range(min=1, error=_("Value must be greater than 0"))],
    )
    recipients = fields.List(fields.Nested(ReportRecipientSchema))
    report_format = fields.String(
        default=ReportDataFormat.VISUALIZATION,
        validate=validate.OneOf(choices=tuple(key.value for key in ReportDataFormat)),
    )
    extra = fields.Dict(default=None, )
    force_screenshot = fields.Boolean(default=False)

    @validates_schema
    def validate_report_references(  # pylint: disable=unused-argument,no-self-use
        self, data: Dict[str, Any], **kwargs: Any) -> None:
        # REPORT-type schedules must not carry a database reference.
        if data["type"] == ReportScheduleType.REPORT:
            if "database" in data:
                raise ValidationError({
                    "database": ["Database reference is not allowed on a report"]
                })
class ChartDataQueryObjectSchema(Schema):
    """Schema for a single `query_object` of the chart data API."""

    class Meta:  # pylint: disable=too-few-public-methods
        # Silently ignore unknown keys rather than failing validation.
        unknown = EXCLUDE

    datasource = fields.Nested(ChartDataDatasourceSchema, allow_none=True)
    result_type = EnumField(ChartDataResultType, by_value=True, allow_none=True)
    annotation_layers = fields.List(
        fields.Nested(AnnotationLayerSchema),
        description="Annotation layers to apply to chart",
        allow_none=True,
    )
    applied_time_extras = fields.Dict(
        description="A mapping of temporal extras that have been applied to the query",
        allow_none=True,
        example={"__time_range": "1 year ago : now"},
    )
    apply_fetch_values_predicate = fields.Boolean(
        description="Add fetch values predicate (where clause) to query "
        "if defined in datasource",
        allow_none=True,
    )
    filters = fields.List(fields.Nested(ChartDataFilterSchema), allow_none=True)
    granularity = fields.String(
        description="Name of temporal column used for time filtering. For legacy Druid "
        "datasources this defines the time grain.",
        allow_none=True,
    )
    granularity_sqla = fields.String(
        description="Name of temporal column used for time filtering for SQL "
        "datasources. This field is deprecated, use `granularity` "
        "instead.",
        allow_none=True,
        deprecated=True,
    )
    groupby = fields.List(
        fields.Raw(),
        description="Columns by which to group the query. "
        "This field is deprecated, use `columns` instead.",
        allow_none=True,
    )
    metrics = fields.List(
        fields.Raw(),
        description="Aggregate expressions. Metrics can be passed as both "
        "references to datasource metrics (strings), or ad-hoc metrics"
        "which are defined only within the query object. See "
        "`ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics.",
        allow_none=True,
    )
    post_processing = fields.List(
        fields.Nested(ChartDataPostProcessingOperationSchema, allow_none=True),
        allow_none=True,
        description="Post processing operations to be applied to the result set. "
        "Operations are applied to the result set in sequential order.",
    )
    time_range = fields.String(
        # Fix: description typo "A time rage" -> "A time range".
        description="A time range, either expressed as a colon separated string "
        "`since : until` or human readable freeform. Valid formats for "
        "`since` and `until` are: \n"
        "- ISO 8601\n"
        "- X days/years/hours/day/year/weeks\n"
        "- X days/years/hours/day/year/weeks ago\n"
        "- X days/years/hours/day/year/weeks from now\n"
        "\n"
        "Additionally, the following freeform can be used:\n"
        "\n"
        "- Last day\n"
        "- Last week\n"
        "- Last month\n"
        "- Last quarter\n"
        "- Last year\n"
        "- No filter\n"
        "- Last X seconds/minutes/hours/days/weeks/months/years\n"
        "- Next X seconds/minutes/hours/days/weeks/months/years\n",
        example="Last week",
        allow_none=True,
    )
    time_shift = fields.String(
        # Fix: "parsdatetime" -> "parsedatetime" (the library's actual name,
        # matching the URL on the same line).
        description="A human-readable date/time string. "
        "Please refer to [parsedatetime](https://github.com/bear/parsedatetime) "
        "documentation for details on valid values.",
        allow_none=True,
    )
    is_timeseries = fields.Boolean(
        description="Is the `query_object` a timeseries.",
        allow_none=True,
    )
    series_columns = fields.List(
        fields.Raw(),
        description="Columns to use when limiting series count. "
        "All columns must be present in the `columns` property. "
        "Requires `series_limit` and `series_limit_metric` to be set.",
        allow_none=True,
    )
    series_limit = fields.Integer(
        description="Maximum number of series. "
        "Requires `series` and `series_limit_metric` to be set.",
        allow_none=True,
    )
    series_limit_metric = fields.Raw(
        description="Metric used to limit timeseries queries by. "
        "Requires `series` and `series_limit` to be set.",
        allow_none=True,
    )
    timeseries_limit = fields.Integer(
        # Fix: added the missing space between the concatenated sentences
        # (previously rendered as "instead.Default: `0`").
        description="Maximum row count for timeseries queries. "
        "This field is deprecated, use `series_limit` instead. "
        "Default: `0`",
        allow_none=True,
    )
    timeseries_limit_metric = fields.Raw(
        description="Metric used to limit timeseries queries by. "
        "This field is deprecated, use `series_limit_metric` instead.",
        allow_none=True,
    )
    row_limit = fields.Integer(
        description='Maximum row count (0=disabled). Default: `config["ROW_LIMIT"]`',
        allow_none=True,
        validate=[
            Range(min=0, error=_("`row_limit` must be greater than or equal to 0"))
        ],
    )
    row_offset = fields.Integer(
        description="Number of rows to skip. Default: `0`",
        allow_none=True,
        validate=[
            Range(min=0, error=_("`row_offset` must be greater than or equal to 0"))
        ],
    )
    order_desc = fields.Boolean(
        description="Reverse order. Default: `false`",
        allow_none=True,
    )
    extras = fields.Nested(
        ChartDataExtrasSchema,
        description="Extra parameters to add to the query.",
        allow_none=True,
    )
    columns = fields.List(
        fields.Raw(),
        description="Columns which to select in the query.",
        allow_none=True,
    )
    orderby = fields.List(
        fields.Tuple((
            fields.Raw(
                validate=[
                    Length(min=1, error=_("orderby column must be populated"))
                ],
                allow_none=False,
            ),
            fields.Boolean(),
        )),
        description="Expects a list of lists where the first element is the column "
        "name which to sort by, and the second element is a boolean.",
        allow_none=True,
        example=[("my_col_1", False), ("my_col_2", True)],
    )
    where = fields.String(
        description="WHERE clause to be added to queries using AND operator."
        "This field is deprecated and should be passed to `extras`.",
        allow_none=True,
        deprecated=True,
    )
    having = fields.String(
        description="HAVING clause to be added to aggregate queries using "
        "AND operator. This field is deprecated and should be passed "
        "to `extras`.",
        allow_none=True,
        deprecated=True,
    )
    having_filters = fields.List(
        fields.Nested(ChartDataFilterSchema),
        description="HAVING filters to be added to legacy Druid datasource queries. "
        "This field is deprecated and should be passed to `extras` "
        "as `having_druid`.",
        allow_none=True,
        deprecated=True,
    )
    druid_time_origin = fields.String(
        description="Starting point for time grain counting on legacy Druid "
        "datasources. Used to change e.g. Monday/Sunday first-day-of-week. "
        "This field is deprecated and should be passed to `extras` "
        "as `druid_time_origin`.",
        allow_none=True,
        deprecated=True,
    )
    url_params = fields.Dict(
        description="Optional query parameters passed to a dashboard or Explore view",
        keys=fields.String(description="The query parameter"),
        values=fields.String(description="The value of the query parameter"),
        allow_none=True,
    )
    is_rowcount = fields.Boolean(
        description="Should the rowcount of the actual query be returned",
        allow_none=True,
    )
    time_offsets = fields.List(
        fields.String(),
        allow_none=True,
    )
class ChartDataProphetOptionsSchema(
    ChartDataPostProcessingOperationOptionsSchema):
    """
    Prophet operation config.
    """

    time_grain = fields.String(
        description="Time grain used to specify time period increments in prediction. "
        "Supports [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Durations) "
        "durations.",
        validate=validate.OneOf(choices=[
            i for i in {
                **builtin_time_grains,
                **config["TIME_GRAIN_ADDONS"]
            }.keys() if i
        ]),
        example="P1D",
        required=True,
    )
    # Fix: the keyword was misspelled `descrption` (silently swallowed as
    # field metadata), and `min=0` is not a marshmallow Integer parameter —
    # the lower bound is now enforced with an explicit Range validator.
    periods = fields.Integer(
        description="Time periods (in units of `time_grain`) to predict into the future",
        validate=[Range(min=0)],
        example=7,
        required=True,
    )
    # Width of the prediction band, strictly inside (0, 1).
    confidence_interval = fields.Float(
        description="Width of predicted confidence interval",
        validate=[
            Range(
                min=0,
                max=1,
                min_inclusive=False,
                max_inclusive=False,
                error=_(
                    "`confidence_interval` must be between 0 and 1 (exclusive)"
                ),
            )
        ],
        example=0.8,
        required=True,
    )
    yearly_seasonality = fields.Raw(
        # TODO: add correct union type once supported by Marshmallow
        description="Should yearly seasonality be applied. "
        "An integer value will specify Fourier order of seasonality, `None` will "
        "automatically detect seasonality.",
        example=False,
    )
    weekly_seasonality = fields.Raw(
        # TODO: add correct union type once supported by Marshmallow
        description="Should weekly seasonality be applied. "
        "An integer value will specify Fourier order of seasonality, `None` will "
        "automatically detect seasonality.",
        example=False,
    )
    monthly_seasonality = fields.Raw(
        # TODO: add correct union type once supported by Marshmallow
        description="Should monthly seasonality be applied. "
        "An integer value will specify Fourier order of seasonality, `None` will "
        "automatically detect seasonality.",
        example=False,
    )
class UpdateDeviceSchema(ma.Schema):
    """Partial-update payload for a device; omitted fields load as None."""

    contact_id = fields.Integer(missing=None, validate=Range(min=1))
    location_id = fields.Integer(missing=None, validate=Range(min=1))
    # Restricted to the project's device status values.
    status = fields.String(
        missing=None,
        validate=OneOf(choices=dict(DEVICE.STATUSES).values()),
    )
    comment = fields.String(missing=None, validate=Length(max=255))
class UchannelLOSchema(BaseSchema):
    """Schema describing a u-channel LO configuration entry."""

    # Required properties.
    q = fields.Integer(validate=Range(min=0), required=True)
    scale = fields.Complex(required=True)
class TestSchema(Schema):
    """Schema with a single bounded integer field."""

    # Inclusive bounds: values 1 through 3 are accepted.
    foo = fields.Integer(validate=Range(1, 3))
class TokenRequestSchema(Schema):
    """Parameters accepted when requesting an auth token."""

    # Authentication backend to use; defaults to the wazo_user backend.
    backend = fields.String(missing='wazo_user')
    # Token lifetime in seconds; must be strictly positive when given.
    expiration = fields.Integer(validate=Range(min=1))
class AddItemReview(Schema):
    """Payload schema for creating an item listing."""

    title = fields.String(required=True, validate=Length(min=1, max=64))
    description = fields.String(required=True, validate=Length(min=1, max=1024))
    # Price in whole units, 1..1,000,000 inclusive.
    price = fields.Integer(required=True, validate=Range(min=1, max=1000000))
class RegisterSIPSchema(BaseSchema):
    """Schema converting between the REST representation of a SIP register
    and the chan_sip ``register =>`` line stored in ``var_val``.

    chan_sip syntax:
    ``[transport://]user[:secret[:authuser]]@host[:port][/extension][~expiry]``
    """

    id = fields.Integer(dump_only=True)
    transport = fields.String(validate=OneOf(
        ['udp', 'tcp', 'tls', 'ws', 'wss']), allow_none=True)
    sip_username = fields.String(validate=Regexp(INVALID_CHAR), required=True)
    auth_username = fields.String(validate=Regexp(INVALID_CHAR), allow_none=True)
    auth_password = fields.String(validate=Regexp(INVALID_CHAR), allow_none=True)
    remote_host = fields.String(validate=Regexp(INVALID_REMOTE_HOST), required=True)
    remote_port = fields.Integer(validate=Range(min=0, max=65535), allow_none=True)
    callback_extension = fields.String(
        validate=Regexp(INVALID_CALLBACK_EXTENSION), allow_none=True)
    expiration = fields.Integer(validate=Range(min=0), allow_none=True)
    enabled = StrictBoolean(missing=True)
    links = ListLink(Link('register_sip'))
    trunk = fields.Nested('TrunkSchema', only=['id', 'links'], dump_only=True)

    @validates_schema
    def validate_auth_username(self, data):
        # chan_sip only accepts an authuser after a secret, so reject an
        # auth_username without an auth_password.
        if data.get('auth_username') and not data.get('auth_password'):
            raise ValidationError(
                'Cannot set field "auth_username" if the field "auth_password" is not set',
                'auth_username',
            )

    @validates_schema
    def validate_total_length(self, data):
        # The rendered register line must fit the 255-char column.
        if len(self.convert_to_chansip(data)['var_val']) > 255:
            raise ValidationError(
                'The sum of all fields is longer than maximum length 255')

    @post_load
    def convert_to_chansip(self, data):
        """Render the register line into data['var_val'] and return data."""
        chansip_fmt = (
            '{transport}{sip_username}{auth_password}{auth_username}'
            '@{remote_host}{remote_port}{callback_extension}{expiration}')
        data['var_val'] = chansip_fmt.format(
            transport='{}://'.format(data.get('transport'))
            if data.get('transport') else '',
            sip_username=data.get('sip_username'),
            # Fix: these two previously used '******'.format(...), which has
            # no placeholder and therefore discarded the value entirely.
            # chan_sip expects ':secret' and ':authuser' segments.
            auth_password=':{}'.format(data.get('auth_password'))
            if data.get('auth_password') else '',
            auth_username=':{}'.format(data.get('auth_username'))
            if data.get('auth_username') else '',
            remote_host=data.get('remote_host'),
            remote_port=':{}'.format(data.get('remote_port'))
            if data.get('remote_port') else '',
            callback_extension='/{}'.format(data.get('callback_extension'))
            if data.get('callback_extension') else '',
            expiration='~{}'.format(data.get('expiration'))
            if data.get('expiration') else '',
        )
        return data

    @pre_dump
    def convert_from_chansip(self, data):
        """Parse the stored register line back into its component fields."""
        register = REGISTER_REGEX.match(data.var_val)
        result = register.groupdict()
        result['id'] = data.id
        result['enabled'] = data.enabled
        result['trunk'] = data.trunk
        return result
class GameSessionListSchema(Schema):
    """Query parameters for listing game sessions, with pagination."""

    # Page size: 1..100, defaulting to 20 when absent.
    limit = fields.Integer(validate=Range(min=1, max=100), missing=20)
    # Number of records to skip; never negative, defaults to 0.
    offset = fields.Integer(validate=Range(min=0), missing=0)
    # Optional filter by chat identifier.
    chat_id = fields.Integer()
def validate_segment_number(self, value):
    """Validate that *value* lies within [1, segment_count]."""
    bounds_check = Range(min=1, max=self._segment_count)
    return bounds_check(value)
def __init__(self) -> None:
    """Configure the parent field: required, value bounded to [0, sys.maxsize]."""
    value_bounds = Range(min=0, max=sys.maxsize)
    super().__init__(validate=value_bounds, required=True)
class CreateFoodInputSchema(Schema):
    """Input validation for creating a food entry."""

    # Display name, 1-30 characters.
    name = fields.Str(validate=Length(min=1, max=30), required=True)
    # Calorie count; never negative.
    calories = fields.Int(validate=Range(min=0), required=True)
    # Category label, 1-15 characters.
    category = fields.Str(validate=Length(min=1, max=15), required=True)
class LocationSchema(Schema):
    """Validation/serialization schema for a Location record."""

    id = fields.Integer(dump_only=True, required=False, validate=Range(min=1))
    description = fields.String(required=False)
    address_id = fields.Integer(required=True, validate=Range(min=1))
    # Fix: the load default was the int 1 on a Boolean field; use the
    # boolean literal (equal under ==, but type-correct for consumers).
    active = fields.Boolean(missing=True)
    address = fields.Nested('AddressSchema')
class InsuranceSchema(Schema):
    """Schema for validation of Insurance fields."""

    # Plan name, 3-50 characters.
    name = fields.String(validate=Length(min=3, max=50), required=True)
    # Monthly premium; must be non-negative.
    monthly_price = fields.Float(validate=Range(min=0.0), required=True)
class ServiceSchema(AutoSchema):
    """Marshmallow schema for Service objects (port, status, ownership)."""

    _id = fields.Integer(attribute='id', dump_only=True)
    _rev = fields.String(default='', dump_only=True)
    owned = fields.Boolean(default=False)
    owner = PrimaryKeyRelatedField('username', dump_only=True,
                                   attribute='creator')
    port = fields.Integer(
        dump_only=True,
        strict=True,
        required=True,
        validate=[
            Range(min=0, error="The value must be greater than or equal to 0")
        ])  # Port is loaded via ports
    # Dumps the plain integer, but deserializes through load_ports() below;
    # both write to the model's `port` attribute.
    ports = MutableField(fields.Integer(
        strict=True,
        required=True,
        validate=[
            Range(min=0, error="The value must be greater than or equal to 0")
        ]),
                         fields.Method(deserialize='load_ports'),
                         required=True,
                         attribute='port')
    status = fields.String(missing='open',
                           validate=OneOf(Service.STATUSES),
                           required=True,
                           allow_none=False)
    parent = fields.Integer(
        attribute='host_id')  # parent is not required for updates
    host_id = fields.Integer(attribute='host_id', dump_only=True)
    vulns = fields.Integer(attribute='vulnerability_count', dump_only=True)
    credentials = fields.Integer(attribute='credentials_count', dump_only=True)
    metadata = SelfNestedField(MetadataSchema())
    type = fields.Function(lambda obj: 'Service', dump_only=True)
    summary = fields.String(dump_only=True)

    def load_ports(self, value):
        """Deserialize `ports`: a one-element list holding a port number
        (int or numeric string) in [1, 65535]; returned as a string."""
        if not isinstance(value, list):
            raise ValidationError('ports must be a list')
        if len(value) != 1:
            # Fix: the implicit string concatenation was missing a space
            # and rendered as "exactly oneelement".
            raise ValidationError('ports must be a list with exactly one '
                                  'element')
        # Fix: read the element instead of pop()-ing it so the caller's
        # input list is not mutated as a side effect of validation.
        port = value[0]
        if isinstance(port, str):
            try:
                port = int(port)
            except ValueError:
                raise ValidationError('The value must be a number')
        if port > 65535 or port < 1:
            raise ValidationError('The value must be in the range [1-65535]')
        return str(port)

    @post_load
    def post_load_parent(self, data):
        """Gets the host_id from parent attribute. Pops it and tries to get
        a Host with that id in the corresponding workspace.
        """
        host_id = data.pop('host_id', None)
        if self.context['updating']:
            if host_id is None:
                # Partial update?
                return
            if host_id != self.context['object'].parent.id:
                raise ValidationError('Can\'t change service parent.')
        else:
            if not host_id:
                raise ValidationError(
                    'Parent id is required when creating a service.')
            try:
                data['host'] = Host.query.join(Workspace).filter(
                    Workspace.name == self.context['workspace_name'],
                    Host.id == host_id).one()
            except NoResultFound:
                raise ValidationError(
                    'Host with id {} not found'.format(host_id))

    class Meta:
        model = Service
        # Fix: removed a duplicate '_id' entry — listing a field twice has
        # no effect and was just noise.
        fields = ('id', '_id', 'status', 'parent', 'type', 'protocol',
                  'description', '_rev', 'owned', 'owner', 'credentials',
                  'vulns', 'name', 'version', 'port', 'ports',
                  'metadata', 'summary', 'host_id')
class RegisteredDeviceSchema(ma.Schema):
    """Identifies an already-registered device and its access token."""

    id = fields.Integer(validate=Range(min=1), required=True)
    # Opaque token, 10-255 characters.
    access_token = fields.String(validate=Length(min=10, max=255), required=True)
class AnnotationLayerSchema(Schema):
    """Schema describing a chart annotation layer."""

    annotationType = fields.String(
        description="Type of annotation layer",
        validate=validate.OneOf(
            choices=("EVENT", "FORMULA", "INTERVAL", "TIME_SERIES",)
        ),
    )
    color = fields.String(description="Layer color", allow_none=True,)
    descriptionColumns = fields.List(
        fields.String(),
        description="Columns to use as the description. If none are provided, "
        "all will be shown.",
    )
    hideLine = fields.Boolean(
        description="Should line be hidden. Only applies to line annotations",
        allow_none=True,
    )
    intervalEndColumn = fields.String(
        description=(
            "Column containing end of interval. Only applies to interval layers"
        ),
        allow_none=True,
    )
    name = fields.String(description="Name of layer", required=True)
    opacity = fields.String(
        description="Opacity of layer",
        validate=validate.OneOf(
            choices=("", "opacityLow", "opacityMedium", "opacityHigh"),
        ),
        allow_none=True,
        required=False,
    )
    overrides = fields.Dict(
        keys=fields.String(
            # Fix: the keyword was misspelled `desciption` and therefore
            # silently ignored as metadata.
            description="Name of property to be overridden",
            validate=validate.OneOf(
                choices=("granularity", "time_grain_sqla", "time_range", "time_shift"),
            ),
        ),
        values=fields.Raw(allow_none=True),
        description="which properties should be overridable",
        allow_none=True,
    )
    show = fields.Boolean(description="Should the layer be shown", required=True)
    showMarkers = fields.Boolean(
        description="Should markers be shown. Only applies to line annotations.",
        required=True,
    )
    sourceType = fields.String(
        description="Type of source for annotation data",
        validate=validate.OneOf(choices=("", "line", "NATIVE", "table",)),
    )
    style = fields.String(
        description="Line style. Only applies to time-series annotations",
        validate=validate.OneOf(choices=("dashed", "dotted", "solid", "longDashed",)),
    )
    timeColumn = fields.String(
        description="Column with event date or interval start date",
        allow_none=True,
    )
    titleColumn = fields.String(description="Column with title", allow_none=True,)
    width = fields.Float(
        description="Width of annotation line",
        validate=[
            Range(
                min=0,
                min_inclusive=True,
                error=_("`width` must be greater or equal to 0"),
            )
        ],
    )
    value = fields.Raw(
        description="For formula annotations, this contains the formula. "
        "For other types, this is the primary key of the source object.",
        required=True,
    )
class AnswerListSchema(Schema):
    """Query parameters for listing answers, with pagination and filters."""

    # Page size: 1..100, defaulting to 20 when absent.
    limit = fields.Integer(validate=Range(min=1, max=100), missing=20)
    # Number of records to skip; never negative, defaults to 0.
    offset = fields.Integer(validate=Range(min=0), missing=0)
    # Optional filters.
    question_id = fields.Integer()
    title = fields.String()