Пример #1
0
class ControllerSchema(ma.ModelSchema):
    """Serializes ``Controller`` models together with their devices and loops."""

    class Meta:
        model = Controller
        fields = (
            'id', 'connected', 'name', 'devices', 'loops', 'description', 'uri'
        )

    # Each device / loop entry is dispatched to its concrete schema at
    # dump time by the corresponding disambiguation selector.
    devices = ma.List(PolyField(
        serialization_schema_selector=controller_device_schema_serialization_disambiguation,
    ))
    loops = ma.List(PolyField(
        serialization_schema_selector=controller_loop_schema_serialization_disambiguation,
    ))
Пример #2
0
class RoleAssignmentDomainSchema(BaseSchema):
    """Schema for a role assignment scoped to a domain."""

    scope = fields.Str()
    # The same selector resolves the concrete identifier schema for both
    # serialization and deserialization.
    identifiers = PolyField(
        required=False,
        serialization_schema_selector=_domain_identifier_schema_selector,
        deserialization_schema_selector=_domain_identifier_schema_selector,
    )
Пример #3
0
class ControllerProfileSchema(ma.ModelSchema):
    """Serializes ``ControllerProfile`` models, including their blocks."""

    class Meta:
        model = ControllerProfile
        fields = ('name', 'blocks')

    # Each block is dispatched to its concrete schema at dump time.
    blocks = ma.List(PolyField(
        serialization_schema_selector=controller_block_schema_serialization_disambiguation,
    ))
Пример #4
0
class EventsListSchema(BaseListSchema):
    """List schema whose ``data`` items are polymorphically (de)serialized."""

    data = PolyField(
        many=True,
        serialization_schema_selector=serialize_schema_selector,
        deserialization_schema_selector=deserialize_schema_selector,
    )

    class Meta:
        strict = True
        decoding_class = EventsList
Пример #5
0
class ToolNitpickSectionSchema(BaseNitpickSchema):
    """Validation schema for the ``[tool.nitpick]`` section on ``pyproject.toml``."""

    error_messages = {
        "unknown": help_message("Unknown configuration",
                                "tool_nitpick_section.html"),
    }

    # ``style`` accepts either a single string or a list of strings.
    style = PolyField(
        deserialization_schema_selector=fields.string_or_list_field,
    )
Пример #6
0
class AllIdentifiersFileSchema(CamelCaseSchema):
    """Schema for the all-identifiers file: card-set data plus file metadata."""

    # Each dict value is dispatched to the appropriate card-set schema in
    # both directions via the disambiguation selectors.
    data = fields.Dict(
        keys=fields.Str(),
        values=PolyField(
            serialization_schema_selector=card_set_serialization_disambiguation,
            deserialization_schema_selector=card_set_deserialization_disambiguation,
        ),
    )
    meta = fields.Nested(Meta.Schema())
Пример #7
0
class NitpickStylesSectionSchema(BaseNitpickSchema):
    """Validation schema for the ``[nitpick.styles]`` section on the style file."""

    error_messages = {
        "unknown": help_message("Unknown configuration",
                                "nitpick_section.html#nitpick-styles"),
    }

    # ``include`` accepts either a single string or a list of strings.
    include = PolyField(
        deserialization_schema_selector=fields.string_or_list_field,
    )
Пример #8
0
class RouterDataSchema(Schema):
    """Validates router registration data taken from the request body."""

    # The concrete payload schema is chosen at load time by a token check.
    router_data = PolyField(
        load_from="body",
        deserialization_schema_selector=conditional_token_check)

    @validates_schema(skip_on_field_errors=True)
    def register_router(self, data):
        """Register the payload with the router named in the URL path."""
        path_kwargs = data["path_kwargs"]
        router = self.context["routers"][path_kwargs["router_type"]]
        try:
            router.register(
                uaid="",
                router_data=data["router_data"],
                app_id=path_kwargs["app_id"],
            )
        except RouterException as exc:
            # Surface router failures as request-level errors.
            raise InvalidRequest(exc.message, status_code=exc.status_code,
                                 errno=exc.errno, headers=exc.headers)
class AutoReplySchema(Schema):
    """(De)serializes ``AutoReply`` records whose payload lives in ``data``."""

    mp_id = fields.Int()
    reply_type = fields.Int()
    # The payload schema depends on the reply type, in both directions;
    # the field is stored on the ``data`` attribute of the model.
    reply = PolyField(
        attribute="data",
        serialization_schema_selector=reply_serialization_schema_selector,
        deserialization_schema_selector=reply_deserialization_schema_selector,
    )

    @pre_dump
    def pre_dump(self, item):
        # The stored payload is a JSON string; decode it before dumping.
        item.data = json.loads(item.data)
        return item

    @post_load
    def post_load(self, item):
        # Rehydrate a domain object from the loaded dict.
        return AutoReply(**item)
Пример #10
0
class JobSchema(BaseSchema):
    """Schema for a scheduled job and its runtime statistics."""

    id = fields.Str(allow_none=True)
    name = fields.Str(allow_none=True)
    trigger_type = fields.Str(allow_none=True)
    # The trigger payload shape depends on ``trigger_type``; selectors pick
    # the concrete schema in each direction.
    trigger = PolyField(
        allow_none=True,
        serialization_schema_selector=serialize_trigger_selector,
        deserialization_schema_selector=deserialize_trigger_selector,
    )
    request_template = fields.Nested('RequestTemplateSchema')
    misfire_grace_time = fields.Int(allow_none=True)
    coalesce = fields.Bool(allow_none=True)
    # Epoch-formatted timestamp, e.g. 1500065932000.
    next_run_time = DateTime(
        allow_none=True, format='epoch', example='1500065932000')
    success_count = fields.Int(allow_none=True)
    error_count = fields.Int(allow_none=True)
    status = fields.Str(allow_none=True)
Пример #11
0
class ToolNitpickSchema(Schema):
    """Validation schema for the ``[tool.nitpick]`` section on ``pyproject.toml``."""

    style = PolyField(deserialization_schema_selector=detect_string_or_list,
                      required=False)

    @staticmethod
    def flatten_errors(errors: Dict) -> str:
        """Flatten Marshmallow errors to a string.

        ``errors`` maps field names to either a list of messages or a
        nested dict of ``{index: [messages]}``.
        """
        formatted = []
        for field, data in SortedDict(errors).items():
            if isinstance(data, list):
                messages_per_field = ["{}: {}".format(field, ", ".join(data))]
            elif isinstance(data, dict):
                messages_per_field = [
                    "{}[{}]: {}".format(field, index, ", ".join(messages))
                    for index, messages in data.items()
                ]
            else:
                # This should never happen; if it does, just stringify this
                # field's value. (Fixed: the original stringified the whole
                # ``errors`` mapping instead of the current field's data.)
                messages_per_field = [str(data)]
            formatted.append("\n".join(messages_per_field))
        return "\n".join(formatted)
Пример #12
0
class WebPushRequestSchema(Schema):
    """Validates an incoming Web Push request: token info, headers, body
    and — when required — its VAPID authorization header."""

    subscription = fields.Nested(WebPushSubscriptionSchema,
                                 load_from="token_info")
    headers = fields.Nested(WebPushBasicHeaderSchema)
    # Crypto headers are selected conditionally from the raw request headers.
    crypto_headers = PolyField(
        load_from="headers",
        deserialization_schema_selector=conditional_crypto_deserialize,
    )
    body = fields.Raw()
    token_info = fields.Raw()
    vapid_version = fields.String(required=False, missing=None)

    @validates('body')
    def validate_data(self, value):
        """Reject payloads larger than the configured maximum (errno 104)."""
        max_data = self.context["settings"].max_data
        if value and len(value) > max_data:
            raise InvalidRequest(
                "Data payload must be smaller than {}".format(max_data),
                errno=104,
            )

    @pre_load
    def token_prep(self, d):
        """Assemble ``token_info`` from path kwargs and request headers."""
        d["token_info"] = dict(
            api_ver=d["path_kwargs"].get("api_ver"),
            token=d["path_kwargs"].get("token"),
            ckey_header=d["headers"].get("crypto-key", ""),
            auth_header=d["headers"].get("authorization", ""),
        )
        return d

    def validate_auth(self, d):
        """Validate the VAPID authorization JWT, raising InvalidRequest
        (401, errno 109) on any failure.

        Auth is mandatory for v2 endpoints; otherwise it is validated only
        when an Authorization header is present. On success the parsed JWT
        and its crypto key are stored under ``d["jwt"]``.
        """
        auth = d["headers"].get("authorization")
        needs_auth = d["token_info"]["api_ver"] == "v2"
        if not needs_auth and not auth:
            return
        try:
            vapid_auth = parse_auth_header(auth)
            token = vapid_auth['t']
            d["vapid_version"] = "draft{:0>2}".format(vapid_auth['version'])
            if vapid_auth['version'] == 2:
                # Version 2 carries the public key in the auth header itself.
                public_key = vapid_auth['k']
            else:
                # Older drafts: the key comes from the subscription record.
                public_key = d["subscription"].get("public_key")
            jwt = extract_jwt(token, public_key)
        except (KeyError, ValueError, InvalidSignature, TypeError,
                VapidAuthException):
            raise InvalidRequest("Invalid Authorization Header",
                                 status_code=401,
                                 errno=109,
                                 headers={"www-authenticate": PREF_SCHEME})
        if "exp" not in jwt:
            raise InvalidRequest("Invalid bearer token: No expiration",
                                 status_code=401,
                                 errno=109,
                                 headers={"www-authenticate": PREF_SCHEME})

        try:
            jwt_expires = int(jwt['exp'])
        except ValueError:
            raise InvalidRequest("Invalid bearer token: Invalid expiration",
                                 status_code=401,
                                 errno=109,
                                 headers={"www-authenticate": PREF_SCHEME})

        now = time.time()
        jwt_has_expired = now > jwt_expires
        if jwt_has_expired:
            raise InvalidRequest("Invalid bearer token: Auth expired",
                                 status_code=401,
                                 errno=109,
                                 headers={"www-authenticate": PREF_SCHEME})
        # Tokens may not claim validity more than 24 hours ahead.
        jwt_too_far_in_future = (jwt_expires - now) > (60 * 60 * 24)
        if jwt_too_far_in_future:
            raise InvalidRequest(
                "Invalid bearer token: Auth > 24 hours in "
                "the future",
                status_code=401,
                errno=109,
                headers={"www-authenticate": PREF_SCHEME})
        jwt_crypto_key = base64url_encode(public_key)
        d["jwt"] = dict(jwt_crypto_key=jwt_crypto_key, jwt_data=jwt)

    @post_load
    def fixup_output(self, d):
        """Final post-load pass: auth check, crypto-header merge, body
        encoding, and construction of the notification object."""
        # Verify authorization
        # Note: This has to be done here, since schema validation takes place
        #       before nested schemas, and in this case we need all the nested
        #       schema logic to run first.
        self.validate_auth(d)

        # Merge crypto headers back in
        if d["crypto_headers"]:
            d["headers"].update({
                k.replace("_", "-"): v
                for k, v in d["crypto_headers"].items()
            })

        # Base64-encode data for Web Push
        d["body"] = base64url_encode(d["body"])

        # Set the notification based on the validated request schema data
        d["notification"] = WebPushNotification.from_webpush_request_schema(
            data=d,
            fernet=self.context["settings"].fernet,
            legacy=self.context["settings"]._notification_legacy,
        )

        return d
Пример #13
0
class S(Schema):
    """Schema whose ``ob`` field is resolved polymorphically."""

    type = fields.String(required=True)
    ob = PolyField(
        required=True,
        serialization_schema_selector=selector_for_serialize,
        deserialization_schema_selector=selector_for_deserialize,
    )
Пример #14
0
class TicketSchema(Schema):
    """Schema for Ticket API resources.

    Optional fields follow the convention ``required=False, missing=None``
    so that absent values load as ``None``.
    """

    class Meta:
        unknown = EXCLUDE
        api_type = 'tickets'
        url = 'tickets'
        model = Ticket

    id = fields.Int()
    external_ticket_id = fields.String(data_key='externalTicketId')
    external_movement_id = fields.String(data_key='externalMovementId',
                                         required=False,
                                         missing=None)
    seat = fields.String(required=False, missing=None)
    qr_code_url = fields.String(data_key='qrCodeUrl',
                                required=False,
                                missing=None)
    session_date = fields.AwareDateTime(data_key='sessionDate',
                                        allow_none=False)
    # Fixed: the keyword values were swapped (``required=None,
    # missing=False``), which made an absent title load as ``False``.
    title = fields.String(required=False, missing=None)
    external_event_id = fields.String(data_key='eventUid',
                                      required=False,
                                      missing=None)
    barcode = fields.String(data_key='barCode', required=False, missing=None)
    sector_name = fields.String(data_key='sectorName',
                                required=False,
                                missing=None)
    venue_name = fields.String(data_key='venueName',
                               required=False,
                               missing=None)
    venue_room = fields.String(data_key='venueRoom',
                               required=False,
                               missing=None)
    client_name = fields.String(data_key='clientName',
                                required=False,
                                missing=None)
    premium = fields.Boolean()
    client_email = fields.String(data_key='clientEmail',
                                 required=False,
                                 missing=None)
    price = fields.Int(required=False, missing=None)
    share_link = fields.String(data_key='shareLink',
                               required=False,
                               missing=None)
    external_customer_ref = fields.String(data_key='externalCustomerRef',
                                          required=False,
                                          missing=None)
    entrance = fields.String(required=False, missing=None)
    section = fields.String(required=False, missing=None)
    row = fields.String(required=False, missing=None)
    status = fields.String(required=False, missing=None)
    price_code = fields.String(data_key='priceCode',
                               required=False,
                               missing=None)
    created_at = fields.AwareDateTime(data_key='createdAt', allow_none=False)
    updated_at = fields.AwareDateTime(data_key='updatedAt', allow_none=False)
    user_id = RelatedResourceLinkField(schema=UserSchema,
                                       required=False,
                                       missing=None,
                                       data_key='user',
                                       microservice_aware=False)
    can_share = fields.Boolean(data_key='canShare',
                               allow_none=False,
                               required=False,
                               missing=False)
    share_code = fields.String(data_key='shareCode',
                               allow_none=True,
                               required=False,
                               missing=None)
    sharer_email = fields.String(data_key='sharerEmail',
                                 allow_none=True,
                                 required=False,
                                 missing=None)
    redeemer_email = fields.String(data_key='redeemerEmail',
                                   allow_none=True,
                                   required=False,
                                   missing=None)
    redeemed_at = fields.AwareDateTime(data_key='redeemedAt',
                                       required=False,
                                       missing=None)
    shared_at = fields.AwareDateTime(data_key='sharedAt',
                                     required=False,
                                     missing=None)
    sharer_id = RelatedResourceLinkField(schema=UserSchema,
                                         required=False,
                                         missing=None,
                                         data_key='sharer')
    redeemer_id = RelatedResourceLinkField(schema=UserSchema,
                                           required=False,
                                           missing=None,
                                           data_key='redeemer')
    event_date = RelatedResourceLinkField(schema=EventDateSchema,
                                          required=False,
                                          missing=None,
                                          data_key='eventDate')
    # A ticket may reference its parent ticket; the concrete schema is
    # chosen at load time by the selector.
    parent_ticket = PolyField(
        deserialization_schema_selector=parent_ticket_selector,
        data_key='parentTicket',
        required=False,
        missing=None,
        allow_none=True)
    legal_short_text = fields.String(data_key='legalShortText',
                                     required=False,
                                     allow_none=True,
                                     missing=None)
    legal_long_text = fields.String(data_key='legalLongText',
                                    required=False,
                                    allow_none=True,
                                    missing=None)
    map_url = fields.String(data_key='mapUrl',
                            required=False,
                            allow_none=True,
                            missing=None)
    map_image_url = fields.String(data_key='mapImageUrl',
                                  required=False,
                                  allow_none=True,
                                  missing=None)
    ticket_integration = RelatedResourceField(schema=TicketIntegrationSchema,
                                              required=False,
                                              missing=None,
                                              data_key='ticketIntegration')
    ticket_auth = RelatedResourceField(schema=TicketTicketAuthSchema,
                                       data_key='ticketAuth',
                                       missing=None,
                                       allow_none=True)
    event = RelatedResourceLinkField(schema=EventSchema,
                                     required=False,
                                     missing=None)
    venue = RelatedResourceLinkField(schema=VenueSchema,
                                     required=False,
                                     missing=None)
    currency = RelatedResourceLinkField(schema=CurrencySchema,
                                        required=False,
                                        missing=None)
Пример #15
0
class MenuSchema(Schema):
    """Schema for a menu: a list of polymorphic button entries."""

    # Each button is dispatched to its concrete schema in both directions.
    buttons = PolyField(
        many=True,
        serialization_schema_selector=button_serialization_schema_selector,
        deserialization_schema_selector=button_deserialization_schema_selector,
    )
Пример #16
0
class PostSensorDataSchema(SensorDataDescriptionSchema):
    """
    This schema includes data, so it can be used for POST requests
    or GET responses.

    TODO: For the GET use case, look at api/common/validators.py::get_data_downsampling_allowed
          (sets a resolution parameter which we can pass to the data collection function).
    """

    # Optional field that can be used for extra validation
    type = fields.Str(
        required=False,
        validate=OneOf(
            [
                "PostSensorDataRequest",
                "PostMeterDataRequest",
                "PostPrognosisRequest",
                "PostPriceDataRequest",
                "PostWeatherDataRequest",
            ]
        ),
    )
    # The values payload is normalized to a list of floats in both
    # directions by the same selector.
    values = PolyField(
        deserialization_schema_selector=select_schema_to_ensure_list_of_floats,
        serialization_schema_selector=select_schema_to_ensure_list_of_floats,
        many=False,
    )

    @validates_schema
    def check_user_may_create(self, data, **kwargs):
        """Ensure the current user may post data for this sensor."""
        check_access(data["sensor"], "create-children")

    @validates_schema
    def check_schema_unit_against_type(self, data, **kwargs):
        """Validate that the posted unit is compatible with the message type."""
        posted_unit = data["unit"]
        _type = data.get("type", None)
        if (
            _type
            in (
                "PostMeterDataRequest",
                "PostPrognosisRequest",
            )
            and not units_are_convertible(posted_unit, "MW")
        ):
            raise ValidationError(
                f"The unit required for this message type should be convertible to MW, got incompatible unit: {posted_unit}"
            )
        elif _type == "PostPriceDataRequest" and not is_energy_price_unit(posted_unit):
            raise ValidationError(
                f"The unit required for this message type should be convertible to an energy price unit, got incompatible unit: {posted_unit}"
            )

    @validates_schema
    def check_resolution_compatibility_of_values(self, data, **kwargs):
        """Check that the inferred resolution of the posted values is a
        whole multiple of the sensor's required resolution."""
        inferred_resolution = data["duration"] / len(data["values"])
        required_resolution = data["sensor"].event_resolution
        # TODO: we don't yet have a good policy w.r.t. zero-resolution (direct measurement)
        if required_resolution == timedelta(hours=0):
            return
        if inferred_resolution % required_resolution != timedelta(hours=0):
            raise ValidationError(
                f"Resolution of {inferred_resolution} is incompatible with the sensor's required resolution of {required_resolution}."
            )

    @post_load()
    def post_load_sequence(self, data: dict, **kwargs) -> BeliefsDataFrame:
        """If needed, upsample and convert units, then deserialize to a BeliefsDataFrame."""
        data = self.possibly_upsample_values(data)
        data = self.possibly_convert_units(data)
        bdf = self.load_bdf(data)

        # Post-load validation against message type
        _type = data.get("type", None)
        if _type == "PostMeterDataRequest":
            if any(h > timedelta(0) for h in bdf.belief_horizons):
                raise ValidationError("Meter data must lie in the past.")
        elif _type == "PostPrognosisRequest":
            if any(h < timedelta(0) for h in bdf.belief_horizons):
                raise ValidationError("Prognoses must lie in the future.")

        return bdf

    @staticmethod
    def possibly_convert_units(data):
        """
        Convert values if needed, to fit the sensor's unit.
        Marshmallow runs this after validation.
        """
        data["values"] = convert_units(
            data["values"],
            from_unit=data["unit"],
            to_unit=data["sensor"].unit,
            event_resolution=data["sensor"].event_resolution,
        )
        return data

    @staticmethod
    def possibly_upsample_values(data):
        """
        Upsample the data if needed, to fit to the sensor's resolution.
        Marshmallow runs this after validation.
        """
        inferred_resolution = data["duration"] / len(data["values"])
        required_resolution = data["sensor"].event_resolution

        # TODO: we don't yet have a good policy w.r.t. zero-resolution (direct measurement)
        if required_resolution == timedelta(hours=0):
            return data

        # we already know resolutions are compatible (see validation)
        if inferred_resolution != required_resolution:
            data["values"] = upsample_values(
                data["values"],
                from_resolution=inferred_resolution,
                to_resolution=required_resolution,
            )
        return data

    @staticmethod
    def load_bdf(sensor_data: dict) -> BeliefsDataFrame:
        """
        Turn the de-serialized and validated data into a BeliefsDataFrame.
        """
        # The posting user must be registered as a data source.
        source = DataSource.query.filter(
            DataSource.user_id == current_user.id
        ).one_or_none()
        if not source:
            raise ValidationError(
                f"User {current_user.id} is not an accepted data source."
            )

        num_values = len(sensor_data["values"])
        event_resolution = sensor_data["duration"] / num_values
        dt_index = pd.date_range(
            sensor_data["start"],
            periods=num_values,
            freq=event_resolution,
        )
        s = pd.Series(sensor_data["values"], index=dt_index)

        # Work out what the recording time should be
        # Precedence: explicit prior, then explicit horizon, else "now".
        belief_timing = {}
        if "prior" in sensor_data:
            belief_timing["belief_time"] = sensor_data["prior"]
        elif "horizon" in sensor_data:
            belief_timing["belief_horizon"] = sensor_data["horizon"]
        else:
            belief_timing["belief_time"] = server_now()
        return BeliefsDataFrame(
            s,
            source=source,
            sensor=sensor_data["sensor"],
            **belief_timing,
        )