class CertificateInputSchema(CertificateCreationSchema):
    """Input schema for a certificate-creation request.

    Validates the subject data, the issuing authority and the requested
    validity window before a certificate is issued.
    """

    name = fields.String()
    common_name = fields.String(required=True, validate=validators.common_name)
    authority = fields.Nested(AssociatedAuthoritySchema, required=True)

    # Callers may give an explicit start/end pair or a number of years;
    # cross-field consistency is enforced by validate_dates() below.
    validity_start = ArrowDateTime(allow_none=True)
    validity_end = ArrowDateTime(allow_none=True)
    validity_years = fields.Integer(allow_none=True)

    destinations = fields.Nested(AssociatedDestinationSchema, missing=[], many=True)
    notifications = fields.Nested(AssociatedNotificationSchema, missing=[], many=True)
    replaces = fields.Nested(AssociatedCertificateSchema, missing=[], many=True)
    replacements = fields.Nested(AssociatedCertificateSchema,
                                 missing=[],
                                 many=True)  # deprecated
    roles = fields.Nested(AssociatedRoleSchema, missing=[], many=True)

    dns_provider = fields.Nested(AssociatedDnsProviderSchema,
                                 missing=None,
                                 allow_none=True,
                                 required=False)

    csr = fields.String(validate=validators.csr)

    key_type = fields.String(validate=validate.OneOf(CERTIFICATE_KEY_TYPES),
                             missing='RSA2048')

    notify = fields.Boolean(default=True)
    rotation = fields.Boolean()
    rotation_policy = fields.Nested(AssociatedRotationPolicySchema,
                                    missing={'name': 'default'},
                                    allow_none=True,
                                    default={'name': 'default'})

    # certificate body fields -- each one falls back to the app-wide default
    # from the Flask config when the caller omits it.
    organizational_unit = fields.String(missing=lambda: current_app.config.get(
        'LEMUR_DEFAULT_ORGANIZATIONAL_UNIT'))
    organization = fields.String(
        missing=lambda: current_app.config.get('LEMUR_DEFAULT_ORGANIZATION'))
    location = fields.String(
        missing=lambda: current_app.config.get('LEMUR_DEFAULT_LOCATION'))
    country = fields.String(
        missing=lambda: current_app.config.get('LEMUR_DEFAULT_COUNTRY'))
    state = fields.String(
        missing=lambda: current_app.config.get('LEMUR_DEFAULT_STATE'))

    extensions = fields.Nested(ExtensionSchema)

    @validates_schema
    def validate_authority(self, data):
        # Reject requests against an authority that can no longer issue.
        # NOTE(review): indexes data['authority'] directly -- assumes the
        # field deserialized successfully; confirm schema-level validators
        # are skipped when the field itself had errors.
        if not data['authority'].active:
            raise ValidationError("The authority is inactive.", ['authority'])

    @validates_schema
    def validate_dates(self, data):
        # Delegates start/end/years consistency checks to the shared helper.
        validators.dates(data)

    @pre_load
    def load_data(self, data):
        # Mirror the deprecated 'replacements' field onto 'replaces' so that
        # older clients keep working.
        if data.get('replacements'):
            data['replaces'] = data[
                'replacements']  # TODO remove when field is deprecated
        return missing.convert_validity_years(data)
class ChartDataRollingOptionsSchema(ChartDataPostProcessingOperationOptionsSchema):
    """
    Rolling operation config.
    """

    # Fix: this was previously assigned as a one-element tuple
    # (`columns = (fields.Dict(...), )`), which marshmallow does not register
    # as a field, so `columns` was silently dropped from the schema.
    columns = fields.Dict(
        description="columns on which to perform rolling, mapping source column to "
        "target column. For instance, `{'y': 'y'}` will replace the "
        "column `y` with the rolling value in `y`, while `{'y': 'y2'}` "
        "will add a column `y2` based on rolling values calculated "
        "from `y`, leaving the original column `y` unchanged.",
        example={"weekly_rolling_sales": "sales"},
    )
    rolling_type = fields.String(
        description="Type of rolling window. Any numpy function will work.",
        validate=validate.OneOf(choices=(
            "average",
            "argmin",
            "argmax",
            "cumsum",
            "cumprod",
            "max",
            "mean",
            "median",
            "nansum",
            "nanmin",
            "nanmax",
            "nanmean",
            "nanmedian",
            "nanpercentile",
            "min",
            "percentile",
            "prod",
            "product",
            "std",
            "sum",
            "var",
        )),
        required=True,
        example="percentile",
    )
    window = fields.Integer(
        description="Size of the rolling window in days.",
        required=True,
        example=7,
    )
    rolling_type_options = fields.Dict(
        # Fix: keyword was misspelled `desctiption`, so the text landed in the
        # field's arbitrary metadata rather than its documented description.
        description="Optional options to pass to rolling method. Needed for "
        "e.g. quantile operation.",
        example={},
    )
    center = fields.Boolean(
        description="Should the label be at the center of the window. Default: `false`",
        example=False,
    )
    win_type = fields.String(
        description="Type of window function. See "
        "[SciPy window functions](https://docs.scipy.org/doc/scipy/reference"
        "/signal.windows.html#module-scipy.signal.windows) "
        "for more details. Some window functions require passing "
        "additional parameters to `rolling_type_options`. For instance, "
        "to use `gaussian`, the parameter `std` needs to be provided.",
        validate=validate.OneOf(choices=(
            "boxcar",
            "triang",
            "blackman",
            "hamming",
            "bartlett",
            "parzen",
            "bohman",
            "blackmanharris",
            "nuttall",
            "barthann",
            "kaiser",
            "gaussian",
            "general_gaussian",
            "slepian",
            "exponential",
        )),
    )
    min_periods = fields.Integer(
        description="The minimum amount of periods required for a row to be included "
        "in the result set.",
        example=7,
    )
class ChartDataQueryObjectSchema(Schema):
    """Schema for a single query object inside a chart-data request."""

    annotation_layers = fields.List(
        fields.Nested(AnnotationLayerSchema),
        description="Annotation layers to apply to chart",
        allow_none=True,
    )
    applied_time_extras = fields.Dict(
        description="A mapping of temporal extras that have been applied to the query",
        required=False,
        example={"__time_range": "1 year ago : now"},
    )
    filters = fields.List(fields.Nested(ChartDataFilterSchema), required=False)
    granularity = fields.String(
        description="Name of temporal column used for time filtering. For legacy Druid "
        "datasources this defines the time grain.",
    )
    granularity_sqla = fields.String(
        description="Name of temporal column used for time filtering for SQL "
        "datasources. This field is deprecated, use `granularity` "
        "instead.",
        deprecated=True,
    )
    groupby = fields.List(
        fields.String(description="Columns by which to group the query.", ),
        allow_none=True,
    )
    metrics = fields.List(
        fields.Raw(),
        description="Aggregate expressions. Metrics can be passed as both "
        "references to datasource metrics (strings), or ad-hoc metrics"
        "which are defined only within the query object. See "
        "`ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics.",
    )
    post_processing = fields.List(
        fields.Nested(ChartDataPostProcessingOperationSchema, allow_none=True),
        description="Post processing operations to be applied to the result set. "
        "Operations are applied to the result set in sequential order.",
    )
    # Fix: "A time rage" -> "A time range" in the user-facing API description.
    time_range = fields.String(
        description="A time range, either expressed as a colon separated string "
        "`since : until` or human readable freeform. Valid formats for "
        "`since` and `until` are: \n"
        "- ISO 8601\n"
        "- X days/years/hours/day/year/weeks\n"
        "- X days/years/hours/day/year/weeks ago\n"
        "- X days/years/hours/day/year/weeks from now\n"
        "\n"
        "Additionally, the following freeform can be used:\n"
        "\n"
        "- Last day\n"
        "- Last week\n"
        "- Last month\n"
        "- Last quarter\n"
        "- Last year\n"
        "- No filter\n"
        "- Last X seconds/minutes/hours/days/weeks/months/years\n"
        "- Next X seconds/minutes/hours/days/weeks/months/years\n",
        example="Last week",
    )
    # Fix: "parsdatetime" -> "parsedatetime" (the actual library name).
    time_shift = fields.String(
        description="A human-readable date/time string. "
        "Please refer to [parsedatetime](https://github.com/bear/parsedatetime) "
        "documentation for details on valid values.",
    )
    is_timeseries = fields.Boolean(
        description="Is the `query_object` a timeseries.", required=False)
    timeseries_limit = fields.Integer(
        description="Maximum row count for timeseries queries. Default: `0`",
    )
    timeseries_limit_metric = fields.Raw(
        description="Metric used to limit timeseries queries by.",
        allow_none=True,
    )
    row_limit = fields.Integer(
        description='Maximum row count. Default: `config["ROW_LIMIT"]`',
        validate=[
            Range(min=1,
                  error=_("`row_limit` must be greater than or equal to 1"))
        ],
    )
    row_offset = fields.Integer(
        description="Number of rows to skip. Default: `0`",
        validate=[
            Range(min=0,
                  error=_("`row_offset` must be greater than or equal to 0"))
        ],
    )
    order_desc = fields.Boolean(description="Reverse order. Default: `false`",
                                required=False)
    extras = fields.Nested(
        ChartDataExtrasSchema,
        description="Extra parameters to add to the query.",
        required=False,
    )
    columns = fields.List(
        fields.String(),
        description="Columns which to select in the query.",
        allow_none=True,
    )
    orderby = fields.List(
        fields.List(fields.Raw()),
        description="Expects a list of lists where the first element is the column "
        "name which to sort by, and the second element is a boolean.",
        example=[["my_col_1", False], ["my_col_2", True]],
    )
    where = fields.String(
        description="WHERE clause to be added to queries using AND operator."
        "This field is deprecated and should be passed to `extras`.",
        deprecated=True,
    )
    having = fields.String(
        description="HAVING clause to be added to aggregate queries using "
        "AND operator. This field is deprecated and should be passed "
        "to `extras`.",
        deprecated=True,
    )
    having_filters = fields.List(
        fields.Nested(ChartDataFilterSchema),
        description="HAVING filters to be added to legacy Druid datasource queries. "
        "This field is deprecated and should be passed to `extras` "
        "as `having_druid`.",
        deprecated=True,
    )
    druid_time_origin = fields.String(
        description="Starting point for time grain counting on legacy Druid "
        "datasources. Used to change e.g. Monday/Sunday first-day-of-week. "
        "This field is deprecated and should be passed to `extras` "
        "as `druid_time_origin`.",
        allow_none=True,
    )
    url_params = fields.Dict(
        description="Optional query parameters passed to a dashboard or Explore view",
        keys=fields.String(description="The query parameter"),
        values=fields.String(description="The value of the query parameter"),
        allow_none=True,
    )
class GetTodoListSchema(RequestSchema):
    """Query parameters accepted when listing todo items."""

    # Optional filter: restrict the listing to complete/incomplete items.
    complete = fields.Boolean()
class ChartDataResponseResult(Schema):
    """Schema for a single query result inside a chart-data response."""

    annotation_data = fields.List(
        fields.Dict(
            keys=fields.String(description="Annotation layer name"),
            values=fields.String(),
        ),
        description="All requested annotation data",
        allow_none=True,
    )
    cache_key = fields.String(
        description="Unique cache key for query object",
        required=True,
        allow_none=True,
    )
    cached_dttm = fields.String(
        description="Cache timestamp",
        required=True,
        allow_none=True,
    )
    cache_timeout = fields.Integer(
        description="Cache timeout in following order: custom timeout, datasource "
        "timeout, default config timeout.",
        required=True,
        allow_none=True,
    )
    error = fields.String(
        description="Error",
        allow_none=True,
    )
    is_cached = fields.Boolean(
        description="Is the result cached",
        required=True,
        # Fix: was `allow_none=None`; `True` is clearly intended, matching the
        # `required=True, allow_none=True` pattern of the sibling fields.
        allow_none=True,
    )
    query = fields.String(
        description="The executed query statement",
        required=True,
        allow_none=False,
    )
    status = fields.String(
        description="Status of the query",
        validate=validate.OneOf(choices=(
            "stopped",
            "failed",
            "pending",
            "running",
            "scheduled",
            "success",
            "timed_out",
        )),
        allow_none=False,
    )
    stacktrace = fields.String(
        # Fix: keyword was misspelled `desciption`, landing in field metadata
        # instead of the documented description.
        description="Stacktrace if there was an error",
        allow_none=True,
    )
    rowcount = fields.Integer(
        description="Amount of rows in result set",
        allow_none=False,
    )
    data = fields.List(fields.Dict(), description="A list with results")
    applied_filters = fields.List(fields.Dict(),
                                  description="A list with applied filters")
    rejected_filters = fields.List(fields.Dict(),
                                   description="A list with rejected filters")
class IOSInterfaceOSPFAuth(Schema):
    """OSPF authentication configuration attached to an IOS interface."""

    key_chain = fields.Str()
    key = fields.Str()
    # Zero or more message-digest (MD5) key entries.
    message_digest = fields.List(fields.Nested(IOSMessageDigest))
    # Marks an explicitly empty/null auth block; absent means False.
    is_null = fields.Boolean(default=False)
class CreateTodoSchema(RequestSchema):
    """Payload required to create a new todo item."""

    # Both fields are mandatory on creation.
    complete = fields.Boolean(required=True)
    description = fields.String(required=True)
class RouteCreateSchema(Schema):
    """Payload for creating a transit route."""

    id = fields.String()
    line = fields.Nested(LinecreateSchema)
    # Stations are serialized out only; they cannot be set on creation.
    stations = fields.Nested(StationSchema, many=True, dump_only=True)
    direction = fields.Boolean()
    is_active = fields.Boolean()
class GutenbergFlagsSchema(SerializableSchema):
    """Command-line flags accepted by the Gutenberg offliner task."""

    class Meta:
        # Keep declaration order so the flags render in a stable order.
        ordered = True

    languages = fields.String(
        metadata={
            "label": "Languages",
            "description": "Comma-separated list of lang codes to filter "
            "export to (preferably ISO 639-1, else ISO 639-3) Defaults to all",
        },
    )
    formats = fields.String(
        metadata={
            "label": "Formats",
            "description": "Comma-separated list of formats to filter "
            "export to (epub, html, pdf, all) Defaults to all",
        },
    )
    zim_title = fields.String(
        metadata={
            "label": "Title",
            "description": "Custom title for your project and ZIM.",
        },
        data_key="zim-title",
    )
    zim_desc = fields.String(
        metadata={
            "label": "Description",
            "description": "Description for ZIM"
        },
        data_key="zim-desc",
    )
    books = fields.String(
        metadata={
            "label": "Books",
            "description": "Filter to only specific books ; separated by "
            "commas, or dashes for intervals. Defaults to all",
        },
    )
    concurrency = fields.Integer(
        metadata={
            "label": "Concurrency",
            "description": "Number of concurrent threads to use",
        },
    )
    dlc = fields.Integer(
        metadata={
            "label": "Download Concurrency",
            "description": "Number of parallel downloads to run (overrides concurrency)",
        },
        data_key="dlc",
    )
    # /!\ we are using a boolean flag for this while the actual option
    # expect an output folder for the ZIM files.
    # Given we can't set the output dir for regular mode, we're using this
    # flag to switch between the two and the path is set to the mount point
    # in command_for() (offliners.py)
    one_language_one_zim = fields.Boolean(
        truthy=[True, "/output"],
        falsy=[False],
        metadata={
            "label": "Multiple ZIMs",
            "description": "Create one ZIM per language",
        },
        data_key="one-language-one-zim",
    )
    no_index = fields.Boolean(
        truthy=[True],
        falsy=[False],
        metadata={
            "label": "No Index",
            "description": "Do not create full-text index within ZIM file",
        },
        data_key="no-index",
    )
    title_search = fields.Boolean(
        truthy=[True],
        falsy=[False],
        metadata={
            "label": "Title search",
            "description": "Search by title feature (⚠️ does not scale)",
        },
        data_key="title-search",
    )
    bookshelves = fields.Boolean(
        truthy=[True],
        falsy=[False],
        metadata={
            "label": "Bookshelves",
            "description": "Browse by bookshelves feature",
        },
    )
    optimization_cache = fields.Url(
        metadata={
            "label": "Optimization Cache URL",
            "description": "S3 Storage URL including credentials and bucket",
            # Marked secret so the value is not echoed in UIs/logs.
            "secret": True,
        },
        data_key="optimization-cache",
    )
    # NOTE(review): data_key keeps the leading "--", unlike every other flag
    # here (e.g. "no-index") -- confirm this is intentional and not a paste
    # of the full CLI option name.
    use_any_optimized_version = fields.Boolean(
        truthy=[True],
        falsy=[False],
        data_key="--use-any-optimized-version")
class PlateAppearance(Schema):
    """Serializes one plate appearance from play-by-play (Retrosheet-style) data.

    The schema is stateful: game-level running state (outs, score, runners,
    responsible pitchers, lineups) lives in ``self.context`` and is read by
    the ``pre_dump`` hooks and advanced by the ``post_dump`` hook, so plate
    appearances MUST be dumped in game order.
    """

    pa_id = fields.Integer()
    play = fields.String()
    game_id = fields.String()
    year = fields.Integer()
    date = fields.DateTime(format='%Y/%m/%d')

    # Batter / pitcher identity and handedness.
    batter_id = fields.String()
    batter_team = fields.String()
    batter_hand = fields.String()
    pitcher_id = fields.String()
    pitcher_team = fields.String()
    pitcher_hand = fields.String()

    # Game situation at the start of the plate appearance.
    inning = fields.Integer()
    is_home = fields.Boolean(data_key='batting_team_home')
    outs = fields.Integer()
    balls = fields.Integer()
    strikes = fields.Integer()
    pitches = fields.String(data_key='sequence')
    away_runs = fields.Integer()
    home_runs = fields.Integer()
    first_runner_id = fields.String()
    second_runner_id = fields.String()
    third_runner_id = fields.String()
    field_pos = fields.Integer()
    lineup_pos = fields.Integer()

    # Outcome of the plate appearance.
    event_type = fields.Integer()
    ab_flag = fields.Boolean()
    pa_flag = fields.Boolean()
    sp_flag = fields.Boolean()
    hit_val = fields.Integer()
    sac_bunt = fields.Boolean()
    sac_fly = fields.Boolean()
    outs_on_play = fields.Integer()
    rbi = fields.Integer()
    runs_on_play = fields.Integer()
    first_scorer = fields.String()
    second_scorer = fields.String()
    third_scorer = fields.String()
    fourth_scorer = fields.String()
    first_runner_event = fields.String()
    second_runner_event = fields.String()
    third_runner_event = fields.String()
    wp = fields.Boolean()
    pb = fields.Boolean()
    fielder_id = fields.String()
    ball_type = fields.String()
    bunt_flag = fields.Boolean()
    foul_flag = fields.Boolean()
    hit_loc = fields.Integer()
    first_error = fields.String()
    second_error = fields.String()
    third_error = fields.String()
    num_errors = fields.Integer()

    # Destination bases ('1'/'2'/'3'/'H'/'O') for batter and runners.
    batter_dest = fields.String()
    first_dest = fields.String()
    second_dest = fields.String()
    third_dest = fields.String()

    # Fielding credits (putouts and assists) on the play.
    first_po = fields.String()
    second_po = fields.String()
    third_po = fields.String()
    first_ast = fields.String()
    second_ast = fields.String()
    third_ast = fields.String()
    fourth_ast = fields.String()
    fifth_ast = fields.String()

    # Defensive alignment during the plate appearance.
    catcher = fields.String()
    first_base = fields.String()
    second_base = fields.String()
    third_base = fields.String()
    shortstop = fields.String()
    left_field = fields.String()
    center_field = fields.String()
    right_field = fields.String()

    @pre_dump
    def expand_play_str(self, data, **kwargs):
        # Stamp game-level state from the shared context onto this record.
        data['game_id'] = self.context['game_id']
        data['date'] = self.context['date']
        data['outs'] = self.context['outs']
        data['home_runs'] = self.context['home_runs']
        data['away_runs'] = self.context['away_runs']
        data['first_runner_id'] = self.context['runners_before'][1]
        data['second_runner_id'] = self.context['runners_before'][2]
        data['third_runner_id'] = self.context['runners_before'][3]
        # The fielding side is the opposite of the batting side, so the
        # pitcher/defense come from the other team's lineup.
        if data['is_home']:
            data['sp_flag'] = self.context['lineups']['away_field_pos'][
                'sp'] == data['pitcher_id']
            data['batter_team'] = self.context['home_team']
            data['pitcher_team'] = self.context['away_team']
        else:
            data['sp_flag'] = self.context['lineups']['home_field_pos'][
                'sp'] == data['pitcher_id']
            data['batter_team'] = self.context['away_team']
            data['pitcher_team'] = self.context['home_team']
        # Fill fielding positions 2-9 (catcher .. right field) from the
        # defensive lineup; pos_dict maps position number -> field name.
        for pos in range(2, 10):
            if data['is_home']:
                data[pos_dict[pos]] = self.context['lineups'][
                    'away_field_pos'][pos]
            else:
                data[pos_dict[pos]] = self.context['lineups'][
                    'home_field_pos'][pos]
        return data

    @pre_dump
    def count(self, data, **kwargs):
        # Split the two-character count string into balls/strikes; '?'-style
        # unknown digits are left unset.
        if len(data['count']) > 1:
            if data['count'][0].isdigit():
                data['balls'] = int(data['count'][0])
            if data['count'][1].isdigit():
                data['strikes'] = int(data['count'][1])
        return data

    @pre_dump
    def get_year(self, data, **kwargs):
        # Derive the season year from the (datetime) game date.
        data['year'] = data['date'].year
        return data

    @post_dump
    def update_state(self, data, **kwargs):
        """Advance the shared game state after this plate appearance.

        Runners are moved lead-runner first (3rd, then 2nd, then 1st, then
        the batter) so a trailing runner never overwrites the base its lead
        runner just vacated. 'O' = out, 'H' = scored.
        """
        self.context['outs'] += data['outs_on_play']
        if data['third_dest'] in set(['3']):
            self.context['runners_before'][int(
                data['third_dest'])] = self.context['runners_before'][3]
            self.context['responsible_pitchers'][
                data['third_dest']] = self.context['responsible_pitchers']['3']
        if data['third_dest'] in set(['O', 'H']):
            self.context['runners_before'][3] = ''
            self.context['responsible_pitchers']['3'] = ''
        if data['second_dest'] in set(['2', '3']):
            self.context['runners_before'][int(
                data['second_dest'])] = self.context['runners_before'][2]
            self.context['responsible_pitchers'][data[
                'second_dest']] = self.context['responsible_pitchers']['2']
        if data['second_dest'] in set(['3', 'O', 'H']):
            self.context['runners_before'][2] = ''
            self.context['responsible_pitchers']['2'] = ''
        if data['first_dest'] in set(['1', '2', '3']):
            self.context['runners_before'][int(
                data['first_dest'])] = self.context['runners_before'][1]
            self.context['responsible_pitchers'][
                data['first_dest']] = self.context['responsible_pitchers']['1']
        if data['first_dest'] in set(['2', '3', 'O', 'H']):
            self.context['runners_before'][1] = ''
            self.context['responsible_pitchers']['1'] = ''
        if data['batter_dest'] in set(['1', '2', '3']):
            self.context['runners_before'][int(
                data['batter_dest'])] = data['batter_id']
            self.context['responsible_pitchers'][data[
                'batter_dest']] = self.context['responsible_pitchers']['B']
        # Credit runs to the batting team's running score.
        if data['batting_team_home']:
            self.context['home_runs'] += data['runs_on_play']
        else:
            self.context['away_runs'] += data['runs_on_play']
        # End of half-inning: clear bases, outs and responsibility tracking.
        if self.context['outs'] == 3:
            self.context['runners_before'][1] = ''
            self.context['runners_before'][2] = ''
            self.context['runners_before'][3] = ''
            self.context['outs'] = 0
            self.context['responsible_pitchers']['1'] = ''
            self.context['responsible_pitchers']['2'] = ''
            self.context['responsible_pitchers']['3'] = ''
            self.context['responsible_pitchers']['B'] = ''
            self.context['po'] = 0
            self.context['ast'] = 0
        return data
class FuncKeySchema(BaseSchema):
    """Schema for a phone function key (speed dial / BLF button)."""

    # Server-assigned attributes; never accepted from the client.
    id = fields.Integer(dump_only=True)
    inherited = fields.Boolean(dump_only=True)

    blf = StrictBoolean()
    label = fields.String(allow_none=True)
    # Polymorphic destination; resolved against BaseDestinationSchema.
    destination = FuncKeyDestinationField(BaseDestinationSchema, required=True)
class RegistrationStatus(Schema):
    """Schema describing the status of a RegistrationList entry."""

    status = fields.String()
    provisional_only = fields.Boolean()
class StolenStatus(Schema):
    """Schema describing the status of a StolenList entry."""

    status = fields.String()
    provisional_only = fields.Boolean()
class ZimitFlagsSchema(SerializableSchema):
    """Command-line flags accepted by the Zimit (website crawler) offliner."""

    class Meta:
        # Keep declaration order so the flags render in a stable order.
        ordered = True

    url = fields.Url(
        metadata={
            "label": "URL",
            "description": "The URL to start crawling from and main page for ZIM",
        },
        required=True,
    )
    name = fields.String(
        metadata={
            "label": "Name",
            "description": "Name of the ZIM. "
            "Used to compose filename if not otherwise defined",
        },
        required=True,
    )
    lang = fields.String(
        metadata={
            "label": "Language",
            "description": "ISO-639-3 (3 chars) language code of content. "
            "Default to `eng`",
        })
    title = fields.String(
        metadata={
            "label": "Title",
            "description": "Custom title for ZIM. Default to title of main page",
        })
    description = fields.String(metadata={
        "label": "Description",
        "description": "Description for ZIM"
    })
    favicon = fields.Url(
        metadata={
            "label": "Favicon",
            "description": "URL for Favicon. "
            "If unspecified, will attempt to use the one used from main page.",
        },
        required=False,
    )
    zim_file = fields.String(
        metadata={
            "label": "ZIM filename",
            "description": "ZIM file name (based on --name if not provided)",
        },
        data_key="zim-file",
    )
    tags = fields.String(metadata={
        "label": "ZIM Tags",
        "description": "List of Tags for the ZIM file.",
    })
    creator = fields.String(metadata={
        "label": "Content Creator",
        "description": "Name of content creator.",
    })
    source = fields.String(metadata={
        "label": "Content Source",
        "description": "Source name/URL of content",
    })
    workers = fields.Integer(
        metadata={
            "label": "Workers",
            "description": "The number of workers to run in parallel. Default to 1",
        },
        required=False,
    )
    include_domains = fields.String(
        metadata={
            "label": "Include domains",
            "description": "Limit to URLs from only certain domains. "
            "If not set, all URLs are included.",
        },
        data_key="include-domains",
        required=False,
    )
    exclude = fields.String(
        metadata={
            "label": "Exclude",
            "description": "Regex of URLs that should be excluded from the crawl.",
        },
        required=False,
    )
    # Crawler-side flags below use the crawler's camelCase option names as
    # data_key (waitUntil, newContext, useSitemap, mobileDevice, adminEmail).
    wait_until = fields.String(
        metadata={
            "label": "WaitUntil",
            "description": "Puppeteer page.goto() condition to wait for "
            "before continuing. Default to `load`",
        },
        data_key="waitUntil",
        required=False,
    )
    limit = fields.Integer(metadata={
        "label": "Limit",
        "description": "Limit crawl to this number of pages. 0 means no-limit.",
    }, )
    timeout = fields.Integer(
        metadata={
            "label": "Timeout",
            "description": "Timeout for each page to load (in millis). "
            "Default to 30000",
        },
        required=False,
    )
    scope = fields.String(
        metadata={
            "label": "Scope",
            "description": "The scope of current page that should be included in the "
            "crawl (defaults to the domain of URL)",
        },
        required=False,
    )
    scroll = fields.Boolean(
        truthy=[True],
        falsy=[False],
        metadata={
            "label": "Scroll",
            "description": "If set, will autoscroll pages to bottom.",
        },
        required=False,
    )
    new_context = StringEnum(
        metadata={
            "label": "New Context",
            "description": "The context for each new capture. Defaults to page",
        },
        validate=validate.OneOf(["page", "session", "browser"]),
        data_key="newContext",
        required=False,
    )
    verbose = fields.Boolean(
        truthy=[True],
        falsy=[False],
        metadata={
            "label": "Verbose mode",
            "description": "Whether to display additional logs",
        },
        required=False,
    )
    output = fields.String(
        metadata={
            "label": "Output folder",
            "placeholder": "/output",
            "description": "Output folder for ZIM file(s). Leave it as `/output`",
        },
        # Pinned to the container mount point; validate_output enforces it.
        missing="/output",
        default="/output",
        validate=validate_output,
    )
    replay_viewer_source = fields.Url(
        metadata={
            "label": "Replay Viewer Source",
            "description": "URL from which to load the ReplayWeb.page "
            "replay viewer from",
        },
        data_key="replay-viewer-source",
        required=False,
    )
    use_sitemap = fields.Url(
        metadata={
            "label": "Use sitemap",
            "description": "Use as sitemap to get additional URLs for the crawl "
            "(usually at /sitemap.xml)",
        },
        data_key="useSitemap",
        required=False,
    )
    # NOTE(review): user-facing text contains typos ("Iphone X", "Pupeeter");
    # left as-is here since they are runtime strings.
    mobile_device = fields.String(
        metadata={
            "label": "As device",
            "description": "Device to crawl as. Defaults to `Iphone X`. "
            "See Pupeeter's DeviceDescriptors.",
        },
        data_key="mobileDevice",
        required=False,
    )
    admin_email = fields.String(
        metadata={
            "label": "Admin Email",
            "description": "Admin Email for crawler: used in UserAgent "
            "so website admin can contact us",
        },
        data_key="adminEmail",
        required=False,
    )
class FormatSchemaV1(TaxonomySchemaV1):
    """Taxonomy term describing a (media) format."""

    resolution = SanitizedUnicode()
    spec = SanitizedUnicode()
    # Terms are selectable in pickers unless explicitly disabled.
    selectable = fields.Boolean(default=True)
class CommandResultSchema(Schema):
    """Outcome of a command: success flag plus its textual result."""

    result = fields.Boolean(required=True)
    value = fields.String(required=True)
class ChartDataQueryObjectSchema(Schema):
    """Schema for a single query object inside a chart-data request.

    NOTE(review): this duplicates the class name of an earlier, richer schema
    in this file (which adds annotation_layers, row_offset validation, etc.);
    whichever is defined later shadows the other -- confirm which one is meant
    to win.
    """

    filters = fields.List(fields.Nested(ChartDataFilterSchema), required=False)
    granularity = fields.String(
        description="Name of temporal column used for time filtering. For legacy Druid "
        "datasources this defines the time grain.",
    )
    granularity_sqla = fields.String(
        description="Name of temporal column used for time filtering for SQL "
        "datasources. This field is deprecated, use `granularity` "
        "instead.",
        deprecated=True,
    )
    groupby = fields.List(
        fields.String(description="Columns by which to group the query.", ),
    )
    metrics = fields.List(
        fields.Raw(),
        description="Aggregate expressions. Metrics can be passed as both "
        "references to datasource metrics (strings), or ad-hoc metrics"
        "which are defined only within the query object. See "
        "`ChartDataAdhocMetricSchema` for the structure of ad-hoc metrics.",
    )
    post_processing = fields.List(
        fields.Nested(ChartDataPostProcessingOperationSchema),
        description="Post processing operations to be applied to the result set. "
        "Operations are applied to the result set in sequential order.",
    )
    # NOTE(review): "A time rage" and "parsdatetime" below are typos in the
    # user-facing descriptions ("time range" / "parsedatetime").
    time_range = fields.String(
        description="A time rage, either expressed as a colon separated string "
        "`since : until` or human readable freeform. Valid formats for "
        "`since` and `until` are: \n"
        "- ISO 8601\n"
        "- X days/years/hours/day/year/weeks\n"
        "- X days/years/hours/day/year/weeks ago\n"
        "- X days/years/hours/day/year/weeks from now\n"
        "\n"
        "Additionally, the following freeform can be used:\n"
        "\n"
        "- Last day\n"
        "- Last week\n"
        "- Last month\n"
        "- Last quarter\n"
        "- Last year\n"
        "- No filter\n"
        "- Last X seconds/minutes/hours/days/weeks/months/years\n"
        "- Next X seconds/minutes/hours/days/weeks/months/years\n",
        example="Last week",
    )
    time_shift = fields.String(
        description="A human-readable date/time string. "
        "Please refer to [parsdatetime](https://github.com/bear/parsedatetime) "
        "documentation for details on valid values.",
    )
    is_timeseries = fields.Boolean(
        description="Is the `query_object` a timeseries.", required=False)
    timeseries_limit = fields.Integer(
        description="Maximum row count for timeseries queries. Default: `0`",
    )
    row_limit = fields.Integer(
        description='Maximum row count. Default: `config["ROW_LIMIT"]`',
    )
    order_desc = fields.Boolean(description="Reverse order. Default: `false`",
                                required=False)
    extras = fields.Nested(ChartDataExtrasSchema, required=False)
    columns = fields.List(
        fields.String(),
        description="",
    )
    orderby = fields.List(
        fields.List(fields.Raw()),
        description="Expects a list of lists where the first element is the column "
        "name which to sort by, and the second element is a boolean ",
        example=[["my_col_1", False], ["my_col_2", True]],
    )
    # Deprecated free-form clauses; superseded by `extras`.
    where = fields.String(
        description="WHERE clause to be added to queries using AND operator."
        "This field is deprecated and should be passed to `extras`.",
        deprecated=True,
    )
    having = fields.String(
        description="HAVING clause to be added to aggregate queries using "
        "AND operator. This field is deprecated and should be passed "
        "to `extras`.",
        deprecated=True,
    )
    having_filters = fields.List(
        fields.Dict(),
        description="HAVING filters to be added to legacy Druid datasource queries. "
        "This field is deprecated and should be passed to `extras` "
        "as `filters_druid`.",
        deprecated=True,
    )
class AssertionSchema(BaseSchema):
    """Result of a single assertion: whether it passed."""

    passed = fields.Boolean()
class IOSInterfaceMPLS(Schema):
    """MPLS features enabled on an IOS interface; both default to off."""

    ldp = fields.Boolean(default=False)
    mpls_te = fields.Boolean(default=False)
class RegexFindIterSchema(RegexSchema):
    """Extends the base regex schema with finditer-style match condition data."""

    condition_match = fields.Boolean()
    condition = custom_fields.NativeOrPretty()
class UpdateTodoSchema(RequestSchema):
    """Payload for updating a todo item; both fields are optional."""

    complete = fields.Boolean()
    description = fields.String()
class IndexerSchema(Schema):
    """Configuration record for a single indexer plugin."""

    name = fields.String()
    module = fields.String()
    enabled = fields.Boolean()
    # Opaque, indexer-specific settings blob.
    settings = fields.String()
class TodoSchema(ResponseSchema):
    """Response representation of a todo item."""

    id = fields.Integer(required=True)
    complete = fields.Boolean(required=True)
    description = fields.String(required=True)
class TicketSchema(Schema):
    """Schema for a ticket resource.

    Deserializes the camelCase API payload (via ``data_key``) into
    snake_case model attributes; unknown keys are ignored. Optional
    fields follow the ``required=False, missing=None`` convention.
    """

    class Meta:
        unknown = EXCLUDE
        api_type = 'tickets'
        url = 'tickets'
        model = Ticket

    id = fields.Int()
    external_ticket_id = fields.String(data_key='externalTicketId')
    external_movement_id = fields.String(data_key='externalMovementId',
                                         required=False,
                                         missing=None)
    seat = fields.String(required=False, missing=None)
    qr_code_url = fields.String(data_key='qrCodeUrl',
                                required=False,
                                missing=None)
    session_date = fields.AwareDateTime(data_key='sessionDate',
                                        allow_none=False)
    # Fix: the keyword values were swapped (`required=None, missing=False`);
    # `required=False, missing=None` matches every other optional field here.
    title = fields.String(required=False, missing=None)
    external_event_id = fields.String(data_key='eventUid',
                                      required=False,
                                      missing=None)
    barcode = fields.String(data_key='barCode', required=False, missing=None)
    sector_name = fields.String(data_key='sectorName',
                                required=False,
                                missing=None)
    venue_name = fields.String(data_key='venueName',
                               required=False,
                               missing=None)
    venue_room = fields.String(data_key='venueRoom',
                               required=False,
                               missing=None)
    client_name = fields.String(data_key='clientName',
                                required=False,
                                missing=None)
    premium = fields.Boolean()
    client_email = fields.String(data_key='clientEmail',
                                 required=False,
                                 missing=None)
    price = fields.Int(required=False, missing=None)
    share_link = fields.String(data_key='shareLink',
                               required=False,
                               missing=None)
    external_customer_ref = fields.String(data_key='externalCustomerRef',
                                          required=False,
                                          missing=None)
    entrance = fields.String(required=False, missing=None)
    section = fields.String(required=False, missing=None)
    row = fields.String(required=False, missing=None)
    status = fields.String(required=False, missing=None)
    price_code = fields.String(data_key='priceCode',
                               required=False,
                               missing=None)
    created_at = fields.AwareDateTime(data_key='createdAt', allow_none=False)
    updated_at = fields.AwareDateTime(data_key='updatedAt', allow_none=False)
    user_id = RelatedResourceLinkField(schema=UserSchema,
                                       required=False,
                                       missing=None,
                                       data_key='user',
                                       microservice_aware=False)

    # Ticket-sharing lifecycle fields.
    can_share = fields.Boolean(data_key='canShare',
                               allow_none=False,
                               required=False,
                               missing=False)
    share_code = fields.String(data_key='shareCode',
                               allow_none=True,
                               required=False,
                               missing=None)
    sharer_email = fields.String(data_key='sharerEmail',
                                 allow_none=True,
                                 required=False,
                                 missing=None)
    redeemer_email = fields.String(data_key='redeemerEmail',
                                   allow_none=True,
                                   required=False,
                                   missing=None)
    redeemed_at = fields.AwareDateTime(data_key='redeemedAt',
                                       required=False,
                                       missing=None)
    shared_at = fields.AwareDateTime(data_key='sharedAt',
                                     required=False,
                                     missing=None)
    sharer_id = RelatedResourceLinkField(schema=UserSchema,
                                         required=False,
                                         missing=None,
                                         data_key='sharer')
    redeemer_id = RelatedResourceLinkField(schema=UserSchema,
                                           required=False,
                                           missing=None,
                                           data_key='redeemer')
    event_date = RelatedResourceLinkField(schema=EventDateSchema,
                                          required=False,
                                          missing=None,
                                          data_key='eventDate')
    # Polymorphic: the selector picks the schema for the parent ticket.
    parent_ticket = PolyField(
        deserialization_schema_selector=parent_ticket_selector,
        data_key='parentTicket',
        required=False,
        missing=None,
        allow_none=True)
    legal_short_text = fields.String(data_key='legalShortText',
                                     required=False,
                                     allow_none=True,
                                     missing=None)
    legal_long_text = fields.String(data_key='legalLongText',
                                    required=False,
                                    allow_none=True,
                                    missing=None)
    map_url = fields.String(data_key='mapUrl',
                            required=False,
                            allow_none=True,
                            missing=None)
    map_image_url = fields.String(data_key='mapImageUrl',
                                  required=False,
                                  allow_none=True,
                                  missing=None)
    ticket_integration = RelatedResourceField(schema=TicketIntegrationSchema,
                                              required=False,
                                              missing=None,
                                              data_key='ticketIntegration')
    ticket_auth = RelatedResourceField(schema=TicketTicketAuthSchema,
                                       data_key='ticketAuth',
                                       missing=None,
                                       allow_none=True)
    event = RelatedResourceLinkField(schema=EventSchema,
                                     required=False,
                                     missing=None)
    venue = RelatedResourceLinkField(schema=VenueSchema,
                                     required=False,
                                     missing=None)
class ChartFavStarResponseResult(Schema):
    """Response payload for a chart favorite-star lookup."""

    id = fields.Integer(description="The Chart id")
    value = fields.Boolean(description="The FaveStar value")
class NotAvailableYet(Schema):
    """Response indicating the resource is not ready yet.

    `enum` is not a marshmallow Boolean kwarg; it flows into field metadata
    (e.g. for OpenAPI generation) constraining the value to `false`.
    """

    ready = fields.Boolean(enum=[False])
class AnnotationLayerSchema(Schema):
    """Schema for a chart annotation layer definition."""

    annotationType = fields.String(
        description="Type of annotation layer",
        validate=validate.OneOf(choices=[ann.value for ann in AnnotationType]),
    )
    color = fields.String(
        description="Layer color",
        allow_none=True,
    )
    descriptionColumns = fields.List(
        fields.String(),
        description="Columns to use as the description. If none are provided, "
        "all will be shown.",
    )
    hideLine = fields.Boolean(
        description="Should line be hidden. Only applies to line annotations",
        allow_none=True,
    )
    intervalEndColumn = fields.String(
        description=(
            "Column containing end of interval. Only applies to interval layers"
        ),
        allow_none=True,
    )
    name = fields.String(description="Name of layer", required=True)
    opacity = fields.String(
        description="Opacity of layer",
        validate=validate.OneOf(
            choices=("", "opacityLow", "opacityMedium", "opacityHigh"), ),
        allow_none=True,
        required=False,
    )
    overrides = fields.Dict(
        keys=fields.String(
            # Fix: keyword was misspelled `desciption`, landing in field
            # metadata instead of the documented description.
            description="Name of property to be overridden",
            validate=validate.OneOf(
                choices=("granularity", "time_grain_sqla", "time_range",
                         "time_shift"), ),
        ),
        values=fields.Raw(allow_none=True),
        description="which properties should be overridable",
        allow_none=True,
    )
    show = fields.Boolean(description="Should the layer be shown",
                          required=True)
    showMarkers = fields.Boolean(
        description="Should markers be shown. Only applies to line annotations.",
        required=True,
    )
    sourceType = fields.String(
        description="Type of source for annotation data",
        validate=validate.OneOf(choices=(
            "",
            "line",
            "NATIVE",
            "table",
        )),
    )
    style = fields.String(
        description="Line style. Only applies to time-series annotations",
        validate=validate.OneOf(choices=(
            "dashed",
            "dotted",
            "solid",
            "longDashed",
        )),
    )
    timeColumn = fields.String(
        description="Column with event date or interval start date",
        allow_none=True,
    )
    titleColumn = fields.String(
        description="Column with title",
        allow_none=True,
    )
    width = fields.Float(
        description="Width of annotation line",
        validate=[
            Range(
                min=0,
                min_inclusive=True,
                error=_("`width` must be greater or equal to 0"),
            )
        ],
    )
    value = fields.Raw(
        description="For formula annotations, this contains the formula. "
        "For other types, this is the primary key of the source object.",
        required=True,
    )
class EventSchemaV1(TaxonomySchemaV1):
    """Taxonomy term describing an event."""

    web = fields.URL()
    organizer = SanitizedUnicode()
    # Dates kept as sanitized strings rather than date fields.
    startDate = SanitizedUnicode()
    endDate = SanitizedUnicode()
    # Terms are selectable in pickers unless explicitly disabled.
    selectable = fields.Boolean(default=True)
class UserSchema(BaseModelSchema):
    """Serialization schema for a user account."""

    email = fields.Email()
    confirmed = fields.Boolean(allow_none=True)
    company_id = fields.Integer()
    # Embedded single company record (not a list).
    company = fields.Nested(CompanySchema, many=False)
    permissions = EnumField(UserPermissions)
class PlantDetailsSchema(Schema):
    """Details of a plant; string fields enforce minimum lengths inline."""

    name = fields.Str(required=True,
                      validate=lambda value: len(value) >= 3)
    # sprout -
    time = fields.Str(required=True,
                      validate=lambda value: len(value) >= 5)
    # full -
    growth = fields.Str(required=True,
                        validate=lambda value: len(value) >= 5)
    edible = fields.Boolean()