class EventoSchema(Schema):
    """Serialization schema for a room-access event record."""

    id = fields.Integer()
    evento = EnumEvento()  # project-defined enum field; event-type semantics defined elsewhere
    horario = fields.DateTime()  # timestamp of the event
    id_usuario = fields.Integer()  # FK to the user involved
    id_sala = fields.Integer()  # FK to the room involved
class ItemSchema(ma.SQLAlchemySchema):
    """Marshmallow-SQLAlchemy schema exposing an item's columns."""

    item_id = fields.Integer()
    item_name = fields.Str()
    note = fields.Str()
    created_at = fields.DateTime()  # audit timestamps
    updated_at = fields.DateTime()
class ArticleSummarySchema(Schema):
    """Schema for an article summary; every field is mandatory on load."""

    published_at = fields.DateTime(required=True)
    author = fields.Str(required=True)
    title = fields.Str(required=True)
    summary = fields.Str(required=True)
    url = fields.Url(required=True)  # validated as a well-formed URL
def test_datetime_format(self):
    """A DateTime field with an explicit strftime format serializes with it.

    Fix: the local was named ``format``, shadowing the builtin ``format()``;
    renamed to ``fmt``. Behavior is unchanged.
    """
    fmt = "%Y-%m-%d"
    field = fields.DateTime(format=fmt)
    # Serialized output must match strftime of the model's datetime value.
    assert field.output("created", self.user) == self.user.created.strftime(fmt)
class BaseSchema:
    """Mixin adding audit timestamps rendered as 'YYYY-MM-DD HH:MM:SS'."""

    created_at = fields.DateTime(format='%Y-%m-%d %H:%M:%S')
    updated_at = fields.DateTime(format='%Y-%m-%d %H:%M:%S')
def test_iso_datetime_field_deserialization(self, fmt):
    """An ISO-8601 string deserializes back to the original datetime."""
    now = dt.datetime.now()
    iso_string = now.isoformat()
    field = fields.DateTime(format=fmt)
    assert_datetime_equal(field.deserialize(iso_string), now)
def test_datetime_field(self):
    """Default DateTime serialization matches the RFC-822 rendering."""
    datetime_field = fields.DateTime()
    rfc_string = utils.rfcformat(self.user.created, localtime=False)
    assert datetime_field.output("created", self.user) == rfc_string
class ResultForPivotJSON(Schema):
    """Schema for a per-date pivot row: total vs. passing quantities."""

    ddate = fields.DateTime(format='%Y-%m-%d')  # rendered date-only
    qty_all = fields.Integer()
    qty_ok = fields.Integer()
class ResultForPivotCreateFeatureJSON(Schema):
    """Schema for a per-date pivot row carrying a single quantity."""

    ddate = fields.DateTime(format='%Y-%m-%d')  # rendered date-only
    qty = fields.Integer()
class ElasticSearchBaseSchema:
    """ElasticSearch specific extra response data."""

    # Underscore-prefixed output keys map to the plain model attributes;
    # dump_only because these are server-assigned timestamps.
    _created = fields.DateTime(dump_only=True, attribute="created")
    _updated = fields.DateTime(dump_only=True, attribute="updated")
class UserSchema(Schema):
    """Public representation of a user."""

    id = fields.Str(dump_only=True)  # server-assigned, never loaded from input
    created_at = fields.DateTime()
    username = fields.Str()
class GroupDownloadOperationParamsV1Schema(Schema):
    """V1 request parameters for a group-download operation."""

    since = fields.DateTime(allow_none=True)  # optional lower time bound

    @post_load
    def make(self, data, **kwargs):
        # Materialize the validated dict into the domain params object.
        return GroupDownloadOperationParams(**data)
# NOTE(review): `pytest` and `fields` are used below but not imported in this
# chunk — presumably imported elsewhere in the file; confirm.
from pyspark.sql import Row
from pyspark.sql.types import *
from marshmallow_pyspark.constants import *
from marshmallow_pyspark.schema import Schema, _RowValidator


def test_create():
    """A bare Schema picks up the library's default error-handling settings."""
    schema = Schema()
    assert schema.error_column_name == DEFAULT_ERRORS_COLUMN
    assert schema.split_errors == DEFAULT_SPLIT_ERRORS


@pytest.mark.parametrize(
    "ma_field, spark_field",
    [(fields.String(), StringType()),
     (fields.DateTime(), TimestampType()),
     (fields.Date(), DateType()),
     (fields.Boolean(), BooleanType()),
     (fields.Integer(), IntegerType()),
     (fields.Number(), DoubleType()),
     (fields.List(fields.String()), ArrayType(StringType())),
     (fields.Nested(Schema.from_dict({"name": fields.String()})),
      StructType([StructField("name", StringType())]))])
def test_spark_schema(ma_field, spark_field):
    """Each marshmallow field type maps to the expected Spark SQL type."""
    class TestSchema(Schema):
        test_column = ma_field

    # The generated schema always appends a nullable errors column.
    spark_schema = StructType([
        StructField("test_column", spark_field, nullable=True),
        StructField(DEFAULT_ERRORS_COLUMN, StringType(), nullable=True)
    ])
    schema = TestSchema()
    assert schema.spark_schema == spark_schema
class NewsStatusSchema(ma.Schema):
    """Schema for a news-status lookup row."""

    status_id = fields.Integer()
    news_status = fields.String()
    created_date = fields.DateTime()
class FollowSchema(Schema):
    """Schema for a user-follows-user relationship."""

    id = fields.Int(dump_only=True)  # server-assigned primary key
    follow_id = fields.Int(required=False)  # the followed user
    user_id = fields.Int(required=False)  # the follower
    created_at = fields.DateTime(dump_only=True)
    modified_at = fields.DateTime(dump_only=True)
class ReadingSerializer(Serializer):
    """Serializer for a sensor reading."""

    # NOTE(review): "%s" is passed through to strftime, whose support for %s
    # (Unix timestamp) is platform-dependent — confirm target platforms.
    created_at = fields.DateTime(format="%s")

    class Meta:
        fields = ("id", "value", "created_at")
def test_rfc_datetime_field_deserialization(self, fmt):
    """An RFC-822 string deserializes back to the original datetime."""
    original = dt.datetime.now()
    rfc_string = utils.rfcformat(original)
    field = fields.DateTime(format=fmt)
    assert_datetime_equal(field.deserialize(rfc_string), original)
class UpdateEventSchema(BaseSchema):
    """Partial-update payload for an event; all fields optional."""

    name = fields.Str(required=False, validate=Length(max=100))
    description = fields.Str(required=False, validate=Length(max=1000))
    organizer = fields.Str(required=False, validate=Length(max=100))
    datetime_of_event = fields.DateTime(required=False)
import logging
from threading import Lock
from flask_restful import Resource, Api, request
from flask_jwt_extended import get_jwt_identity, jwt_required
from activity.service.vote_rate import VoteRate
from activity.model.entities import VoteModel
from marshmallow import Schema, fields

# Ad-hoc schema describing a single vote record.
VoteSchema = Schema.from_dict({
    "id": fields.Int(),
    "user_id": fields.Int(),
    "restaurant_id": fields.Int(),
    "vote_rate": fields.Float(),
    "voted_at": fields.DateTime()
})
_schema = VoteSchema()
vote_lock = Lock()  # guards vote mutations across request threads
_vote_rate = VoteRate()
# NOTE(review): a second lock with a non-descriptive name — purpose unclear
# from this chunk; confirm and rename if unused.
a = Lock()
call = 0  # module-level call counter mutated below
# Expected rate per vote position; presumably used by the rating logic — verify.
expected_rates = {1: 1.0, 2: 0.5, 3: 0.25, 4: 0.25}


def ss(user_id: int, restaurant_id: int):
    # (definition continues beyond this chunk)
    global call
class HostSchema(Schema):
    """Schema describing a managed host; validates config cross-field on load."""

    class Meta:
        ordered = True

    name = NameField(
        required=True,
        data_key="hostname"
    )  # hostname for network use (i.e. fqdn, hostname, or, if necessary, IP)
    control_name = fields.Str(
        allow_none=True, missing=None)  # i.e., container ID for DOCKER (optional)
    johann_id = fields.UUID(allow_none=True, missing=None)
    image = fields.Str(allow_none=True, missing=None)
    user = fields.Str(allow_none=True, missing=None)
    pwd_env = fields.Str(allow_none=True, missing=None)
    os = EnumField(HostOS, allow_none=True, missing=None)
    python_path = fields.Str(allow_none=True, missing=None)
    python_ver = LaxStringField(allow_none=True, missing=None)
    pmtr_variant = EnumField(PmtrVariant, allow_none=True, missing=None)
    control_method = fields.Str(allow_none=True, missing=None)
    pip_offline_install = fields.Boolean(allow_none=True, missing=None)
    tuning = fields.Boolean(dump_only=True)
    pending_create = fields.Boolean(dump_only=True)
    celery_task_ids = fields.List(
        fields.Str(), dump_only=True
    )  # note that finished tasks may be cleared from this list at any time
    last_checked_exists = fields.DateTime(dump_only=True)

    @validates("python_ver")
    def validate_python_ver(self, value):
        # Only project-supported Python versions are accepted (None = unset).
        if value is not None and value not in config.SUPPORTED_PYTHON_VERSIONS:
            raise MarshmallowValidationError(
                f"Unsupported python version '{value}'")

    @validates("control_method")
    def validate_control_method(self, value):
        # Control method must name a registered host-control class (None = unset).
        if value is not None and value not in config.HOST_CONTROL_CLASS_NAMES:
            raise MarshmallowValidationError(
                f"Unrecognized control method '{value}'")

    @post_load
    def make_host(self, data: Dict[str, Any], **kwargs) -> "Host":
        """Build a Host from validated data after cross-field sanity checks."""
        name = data["name"]
        control_name = data["control_name"]
        # validate name and control_name
        if name != safe_name(name):
            raise MarshmallowValidationError(
                f"Name '{name}' does not appear to be a valid hostname")
        if control_name and control_name != safe_name(control_name):
            raise MarshmallowValidationError(
                f"Control name '{control_name}' does not appear to be valid")
        # validate pmtr variant
        if data["pmtr_variant"]:
            try:
                PmtrVariant(data["pmtr_variant"])
            except ValueError:
                raise MarshmallowValidationError(
                    f"PMTR variant '{data['pmtr_variant']}' is invalid")
        # validate control method
        if (data["control_method"]
                and data["control_method"] not in config.HOST_CONTROL_CLASS_NAMES):
            raise MarshmallowValidationError(
                f"Control method '{data['control_method']}' is invalid")
        return Host(**data)
def test_datetime_iso8601(self):
    """format="iso" produces the ISO-8601 rendering of the datetime."""
    iso_field = fields.DateTime(format="iso")
    iso_string = utils.isoformat(self.user.created, localtime=False)
    assert iso_field.output("created", self.user) == iso_string
class TokenSchema(Schema):
    """Schema for an auth token and its expiry timestamp."""

    token = fields.UUID()
    expiry = fields.DateTime()
class DateFormatSerializer(Serializer):
    """Serializer overriding 'updated' to a month-day rendering."""

    updated = fields.DateTime(format="%m-%d")

    class Meta:
        fields = ('created', 'updated')
'lng': fields.Float(required=True), 'frontal': fields.Bool(required=True), 'tilt': fields.Bool(required=True), 'fire': fields.Bool(required=True), 'fall': fields.Bool(required=True), 'temp': fields.Float(required=True), 'license_plate': fields.Str(required=True, validate=validate.Length(7)), 'date': fields.DateTime(required=True), }) class Bridge: def __init__(self): self.api_ip = '192.168.1.18:8000' self.api_version = Setting.API_VERSION self.in_buffer = [] self.data = [] self.ser = None self.port_name = None self.from_zone = tz.gettz('UTC') self.to_zone = tz.tzlocal() def validate_json_data(self, body):
class FatalODSchema(pl.BaseSchema):
    """Schema for fatal accidental-overdose case records.

    The upstream file supplies a pre-combined "death.date.time" column, so it
    is loaded directly. (An earlier revision loaded separate date/time fields
    and merged them in a post_load hook; that dead commented-out code has been
    removed.)
    """

    death_date_and_time = fields.DateTime(load_from="death.date.time",
                                          allow_none=True)
    manner_of_death = fields.String(load_from='manner.of.death')
    age = fields.Integer(allow_none=True)
    sex = fields.String(allow_none=True)
    race = fields.String(allow_none=True)
    case_dispo = fields.String(load_from='case.dispo', allow_none=True)
    # combined_od1 was already disabled upstream; left out deliberately.
    combined_od2 = fields.String(load_from='combined.od2', allow_none=True)
    combined_od3 = fields.String(load_from='combined.od3', allow_none=True)
    combined_od4 = fields.String(load_from='combined.od4', allow_none=True)
    combined_od5 = fields.String(load_from='combined.od5', allow_none=True)
    combined_od6 = fields.String(load_from='combined.od6', allow_none=True)
    combined_od7 = fields.String(load_from='combined.od7', allow_none=True)
    combined_od8 = fields.String(load_from='combined.od8', allow_none=True)
    combined_od9 = fields.String(load_from='combined.od9', allow_none=True)
    combined_od10 = fields.String(load_from='combined.od10', allow_none=True)
    incident_zip = fields.String(load_from="incident.zip", allow_none=True)
    decedent_zip = fields.String(load_from="decedent.zip", allow_none=True)
    # case_year = fields.Integer(load_from='case.year', allow_none=True)

    class Meta:
        ordered = True

    @pre_load
    def fix_zip_codes(self, data):
        """Normalize oddball ZIP codes like "15025-CLAR" or "15-71".

        The source file appends a township abbreviation to distinguish
        multiple neighborhoods/townships sharing one ZIP code; truncate to
        the 5-digit prefix. (The County now truncates at the query level
        when generating the file, so this should rarely fire.)
        """
        # Named zip_fields (not "fields") to avoid shadowing marshmallow.fields.
        zip_fields = ['incident.zip']
        if 'decedent.zip' in data:
            # BUG FIX: the original appended the *list* ['decedent.zip'],
            # which made data[field] below raise TypeError (unhashable list).
            zip_fields.append('decedent.zip')
        for field in zip_fields:
            if data[field] in ['NA']:
                data[field] = None
            elif len(data[field]) > 5:
                data[field] = data[field][:5]  # simple truncation
        return data  # return the (mutated) data so processor chaining works

    @pre_load
    def check_manner(self, data):
        """Hard-fail the pipeline on any record that is not an accident."""
        field = 'manner.of.death'
        if data[field] not in ['Accident', 'Accidents']:
            raise ValueError(f"A record with a non-allowed {field} value was detected ({data[field]}).")
        return data

    @pre_load
    def fix_nas(self, data):
        """Map the literal string "NA" to None for the known columns."""
        for k, v in data.items():
            if k in ['death.date.time', 'age', 'case.year', 'sex', 'race',
                     'manner.of.death'] or k[:11] == 'combined.od':
                if v in ['NA']:
                    data[k] = None
        return data
class HostSchema(Schema):
    """Validation schema for host records submitted to the inventory."""

    display_name = fields.Str(validate=validate.Length(min=1, max=200))
    ansible_host = fields.Str(validate=validate.Length(min=0, max=255))
    account = fields.Str(required=True, validate=validate.Length(min=1, max=10))
    # Canonical system identifiers; each must be a well-formed UUID.
    insights_id = fields.Str(validate=verify_uuid_format)
    rhel_machine_id = fields.Str(validate=verify_uuid_format)
    subscription_manager_id = fields.Str(validate=verify_uuid_format)
    satellite_id = fields.Str(validate=verify_satellite_id)
    fqdn = fields.Str(validate=validate.Length(min=1, max=255))
    bios_uuid = fields.Str(validate=verify_uuid_format)
    ip_addresses = fields.List(
        fields.Str(validate=validate.Length(min=1, max=255)),
        validate=validate.Length(min=1))
    mac_addresses = fields.List(
        fields.Str(validate=validate.Length(min=1, max=59)),
        validate=validate.Length(min=1))
    external_id = fields.Str(validate=validate.Length(min=1, max=500))
    facts = fields.List(fields.Nested(FactsSchema))
    tags = fields.Raw(allow_none=True)  # free-form; shape checked in validate_tags
    system_profile = fields.Nested(SystemProfileSchema)
    stale_timestamp = fields.DateTime(required=True, timezone=True)
    reporter = fields.Str(required=True, validate=validate.Length(min=1, max=255))

    @validates("stale_timestamp")
    def has_timezone_info(self, timestamp):
        # Naive timestamps are rejected outright.
        if timestamp.tzinfo is None:
            raise ValidationError("Timestamp must contain timezone info")

    @validates("tags")
    def validate_tags(self, tags):
        # Tags may arrive as a list of tag objects, a namespace dict, or null;
        # anything else is an error.
        if isinstance(tags, list):
            return self._validate_tags_list(tags)
        elif isinstance(tags, dict):
            return self._validate_tags_dict(tags)
        elif tags is None:
            return True
        else:
            raise ValidationError(
                "Tags must be either an object, an array or null.")

    @staticmethod
    def _validate_tags_list(tags):
        # Delegate per-item validation to TagsSchema.
        TagsSchema(many=True, strict=True).validate(tags)
        return True

    @staticmethod
    def _validate_tags_dict(tags):
        # Expected shape: {namespace: {key: [values] | None} | None};
        # values may contain nulls, but any non-null value must be a string.
        for namespace, ns_tags in tags.items():
            TAG_NAMESPACE_VALIDATION(namespace)
            if ns_tags is None:
                continue
            if not isinstance(ns_tags, dict):
                raise ValidationError(
                    "Tags in a namespace must be an object or null.")
            for key, values in ns_tags.items():
                TAG_KEY_VALIDATION(key)
                if values is None:
                    continue
                if not isinstance(values, list):
                    raise ValidationError(
                        "Tag values must be an array or null.")
                for value in values:
                    if value is None:
                        continue
                    if not isinstance(value, str):
                        raise ValidationError(
                            "Tag value must be a string or null.")
                    TAG_VALUE_VALIDATION(value)
        return True
class SchemaMixin:
    """Mixin adding identity and audit-trail fields to schemas."""

    id = fields.Str()
    created_by = fields.Str()
    created_on = fields.DateTime(dump_only=True)  # server-assigned
    last_modified_by = fields.Str()
    last_modified_on = fields.DateTime(dump_only=True)  # server-assigned
class Product(Schema):
    """Schema for a product entry."""

    name = fields.String(validate=validate.Length(max=10), required=True)
    # Defaults to None when absent; otherwise must lie in [0, 100].
    rating = fields.Integer(missing=None,
                            validate=validate.Range(min=0, max=100))
    created = fields.DateTime()
#!/usr/bin/env python3 # @generated AUTOGENERATED file. Do not Change! from dataclasses import dataclass, field from datetime import datetime from functools import partial from gql.gql.datetime_utils import fromisoformat from numbers import Number from typing import Any, Callable, List, Mapping, Optional from dataclasses_json import dataclass_json from marshmallow import fields as marshmallow_fields DATETIME_FIELD = field( metadata={ "dataclasses_json": { "encoder": datetime.isoformat, "decoder": fromisoformat, "mm_field": marshmallow_fields.DateTime(format="iso"), } }) @dataclass_json @dataclass class AddCustomerInput: name: str externalId: Optional[str] = None
class CommentSchema(ma.Schema):
    # Marshmallow definition of json output for Comment
    id = fields.Integer(dump_only=True)  # server-assigned primary key
    category_id = fields.Integer(required=True)
    comment = fields.String(required=True, validate=validate.Length(1))  # non-empty
    creation_date = fields.DateTime()