Example #1
def load_config(cls, **kwargs):
    schema = marshmallow_dataclass.class_schema(cls)()
    return schema.load(kwargs)
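A minimal usage sketch of the helper above; the DbConfig dataclass and its fields are hypothetical, added only for illustration:

from dataclasses import dataclass

@dataclass
class DbConfig:
    host: str
    port: int

# load_config builds a schema from the dataclass, validates the kwargs and
# returns a DbConfig instance (a ValidationError is raised on bad input).
config = load_config(DbConfig, host="localhost", port=5432)
assert config == DbConfig(host="localhost", port=5432)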
Example #2
    def test_simple_unique_schemas(self):
        @dataclasses.dataclass
        class Simple:
            one: str = dataclasses.field()
            two: str = dataclasses.field()

        @dataclasses.dataclass
        class ComplexNested:
            three: int = dataclasses.field()
            four: Simple = dataclasses.field()

        self.assertIs(class_schema(ComplexNested), class_schema(ComplexNested))
        self.assertIs(class_schema(Simple), class_schema(Simple))
        self.assertIs(
            class_schema(Simple),
            class_schema(ComplexNested)._declared_fields["four"].nested,
        )

        complex_set = {
            class_schema(ComplexNested),
            class_schema(ComplexNested, base_schema=None),
            class_schema(ComplexNested, None),
        }
        simple_set = {
            class_schema(Simple),
            class_schema(Simple, base_schema=None),
            class_schema(Simple, None),
        }
        self.assertEqual(len(complex_set), 1)
        self.assertEqual(len(simple_set), 1)
Example #3
@dataclass
class Status:
    version: str
    status: str


@dataclass
class Feedback:
    question_text: str
    answer_text_id: str
    answer_text: str
    answer_score: float
    feedback: bool


AnswerSchema = marshmallow_dataclass.class_schema(Answer)
answer_schema = AnswerSchema()
answer_list_schema = AnswerSchema(many=True)
QuestionSchema = marshmallow_dataclass.class_schema(Question)
question_schema = QuestionSchema()
question_list_schema = QuestionSchema(many=True)
StatusSchema = marshmallow_dataclass.class_schema(Status)
status_schema = StatusSchema()
FeedbackSchema = marshmallow_dataclass.class_schema(Feedback)
feedback_list_schema = FeedbackSchema(many=True)


@ns_covid_qa.route('/')
class Covid19(Resource):
    @responds(schema=status_schema, api=ns_covid_qa, status_code=200)
    def get(self):
"""
Dataclass for storing params
"""

from dataclasses import dataclass
from marshmallow_dataclass import class_schema
import yaml


@dataclass()
class PredictPipelineParams:
    """
    Pipeline params
    """
    input_data_path: str
    dump_model: str
    result_path: str


PredictPipelineParamsSchema = class_schema(PredictPipelineParams)


def read_predict_pipeline_params(path: str) -> PredictPipelineParams:
    """
    Reads prediction pipeline params from a YAML file
    """
    with open(path, "r") as input_stream:
        schema = PredictPipelineParamsSchema()
        return schema.load(yaml.safe_load(input_stream))
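Continuing the snippet above, a hedged sketch of the YAML this reader expects; the paths are illustrative placeholders, not values from the original project:

example_yaml = """
input_data_path: data/raw/test.csv
dump_model: models/model.pkl
result_path: results/predictions.csv
"""

# Same load path as read_predict_pipeline_params, but from an in-memory string:
params = PredictPipelineParamsSchema().load(yaml.safe_load(example_yaml))
assert params.input_data_path == "data/raw/test.csv"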
Example #5
class ComposeGgo(Controller):
    """
    Given the address of a [parent] GGO, this endpoint will split it up
    into multiple new GGOs ("composing" them from the parent) and transfer
    the new GGOs (children) to other accounts and/or retire them to any
    of the user's own MeteringPoints.

    To do this, provide one or more TransferRequests along with one or
    more RetireRequests. The sum of these must not exceed the parent GGO's
    amount, but it may be less. Any remaining amount is automatically
    transferred back to the owner of the parent GGO.

    # Transfers

    Each TransferRequest contains an amount in Wh, an account ID to
    transfer the given amount to, and an arbitrary reference string
    for future enquiry if necessary.

    # Retires

    Each RetireRequest contains an amount in Wh and a GSRN number to
    retire the specified amount to. The MeteringPoint, identified by the
    GSRN number, must belong to the user itself.

    # Concurrency

    The requested transfers and retires are considered successful upon
    response from this endpoint if the returned value of "success" is true.
    This means that subsequent requests to other endpoints will immediately
    assume the transfers or retires are valid.

    However, due to the asynchronous nature of the blockchain ledger, this
    operation may be rolled back later for reasons that could not be foreseen
    at the time of invoking this endpoint. This will result in the parent GGO
    being stored and available to the user's account again, thus also cancelling
    the transfers and retires.
    """
    Request = md.class_schema(ComposeGgoRequest)
    Response = md.class_schema(ComposeGgoResponse)

    @require_oauth(['ggo.transfer', 'ggo.retire'])
    @inject_user
    @inject_session
    def handle_request(self, request, user, session):
        """
        :param ComposeGgoRequest request:
        :param User user:
        :param sqlalchemy.orm.Session session:
        :rtype: ComposeGgoResponse
        """
        batch, recipients = self.compose(
            user=user,
            ggo_address=request.address,
            transfers=request.transfers,
            retires=request.retires,
        )

        start_handle_composed_ggo_pipeline(batch, recipients, session)

        return ComposeGgoResponse(success=True)

    @atomic
    def compose(self, user, ggo_address, transfers, retires, session):
        """
        :param User user:
        :param str ggo_address:
        :param list[TransferRequest] transfers:
        :param list[RetireRequest] retires:
        :param sqlalchemy.orm.Session session:
        :rtype: (Batch, list[User])
        :returns: Tuple of the composed Batch along with a list of users
            who receive GGOs by transfer
        """
        ggo = self.get_ggo(user, ggo_address, session)
        composer = self.get_composer(ggo, session)

        for transfer in transfers:
            self.add_transfer(composer, transfer, session)

        for retire in retires:
            self.add_retire(user, composer, retire, session)

        try:
            batch, recipients = composer.build_batch()
        except composer.Empty:
            raise BadRequest('Nothing to transfer/retire')
        except composer.AmountUnavailable:
            raise BadRequest('Requested amount exceeds available amount')

        session.add(batch)

        return batch, recipients

    def add_transfer(self, composer, request, session):
        """
        :param GgoComposer composer:
        :param TransferRequest request:
        :param sqlalchemy.orm.Session session:
        """
        target_user = self.get_user(request.account, session)

        if target_user is None:
            raise BadRequest(f'Account unavailable ({request.account})')

        composer.add_transfer(target_user, request.amount, request.reference)

    def add_retire(self, user, composer, request, session):
        """
        :param User user:
        :param GgoComposer composer:
        :param RetireRequest request:
        :param sqlalchemy.orm.Session session:
        """
        meteringpoint = self.get_metering_point(user, request.gsrn, session)

        if meteringpoint is None:
            raise BadRequest(
                f'MeteringPoint unavailable (GSRN: {request.gsrn})')

        try:
            composer.add_retire(meteringpoint, request.amount)
        except composer.RetireMeasurementUnavailable as e:
            raise BadRequest((f'No measurement available at {e.begin} '
                              f'for GSRN {e.gsrn}'))
        except composer.RetireMeasurementInvalid as e:
            raise BadRequest(
                f'Can not retire GGO to measurement {e.measurement.address}')
        except composer.RetireAmountInvalid as e:
            raise BadRequest((f'Can only retire up to {e.allowed_amount} '
                              f'(you tried to retire {e.amount})'))

    def get_ggo(self, user, ggo_address, session):
        """
        :param User user:
        :param str ggo_address:
        :param sqlalchemy.orm.Session session:
        :rtype: Ggo
        """
        ggo = GgoQuery(session) \
            .belongs_to(user) \
            .has_address(ggo_address) \
            .is_tradable() \
            .one_or_none()

        if not ggo:
            raise BadRequest('GGO not found or is unavailable: %s' %
                             ggo_address)

        return ggo

    def get_user(self, sub, session):
        """
        :param str sub:
        :param sqlalchemy.orm.Session session:
        :rtype: User
        """
        return UserQuery(session) \
            .is_active() \
            .has_sub(sub) \
            .one_or_none()

    def get_metering_point(self, user, gsrn, session):
        """
        :param User user:
        :param str gsrn:
        :param sqlalchemy.orm.Session session:
        :rtype: MeteringPoint
        """
        return MeteringPointQuery(session) \
            .belongs_to(user) \
            .has_gsrn(gsrn) \
            .is_consumption() \
            .one_or_none()

    def get_composer(self, *args, **kwargs):
        """
        :rtype: GgoComposer
        """
        return GgoComposer(*args, **kwargs)
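For context, a self-contained sketch of how a schema generated with md.class_schema deserializes a nested JSON body of this shape. The dataclasses below are hypothetical stand-ins modeled on the attributes the controller reads (address, transfers with account/amount/reference, retires with gsrn/amount); they are not the project's real ComposeGgoRequest:

from dataclasses import dataclass, field
from typing import List

import marshmallow_dataclass as md


@dataclass
class TransferRequestSketch:
    amount: int
    account: str
    reference: str


@dataclass
class RetireRequestSketch:
    amount: int
    gsrn: str


@dataclass
class ComposeRequestSketch:
    address: str
    transfers: List[TransferRequestSketch] = field(default_factory=list)
    retires: List[RetireRequestSketch] = field(default_factory=list)


schema = md.class_schema(ComposeRequestSketch)()
request = schema.load({
    'address': 'parent-ggo-address',
    'transfers': [{'amount': 100, 'account': 'other-account', 'reference': 'order-42'}],
    'retires': [{'amount': 50, 'gsrn': '570000000000000000'}],
})
# request.transfers[0] and request.retires[0] are dataclass instances,
# ready to be passed to composer-style methods as in the controller above.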
Example #6
def schema() -> Schema:
    return marshmallow_dataclass.class_schema(TestDTO)()
Example #7
def test__GetMixEmissionsResponse__should_map_data_correctly():
    source_json = {
        "success":
        True,
        "mix_emissions": [
            {
                "timestamp_utc": "2019-09-17T22:00:00.000Z",
                "sector": "DK1",
                "technology": "Wind",
                "amount": 1000,
                "CO2": 111,
                "CH4": 222,
            },
            {
                "timestamp_utc": "2019-09-17T22:00:00.000Z",
                "sector": "DK1",
                "technology": "Solar",
                "amount": 2000,
                "CO2": 333,
                "CH4": 444,
            },
            {
                "timestamp_utc": "2019-09-17T23:00:00.000Z",
                "sector": "DK1",
                "technology": "Wind",
                "amount": 3000,
                "CO2": 555,
                "CH4": 666,
            },
            {
                "timestamp_utc": "2019-09-17T23:00:00.000Z",
                "sector": "DK1",
                "technology": "Solar",
                "amount": 4000,
                "CO2": 777,
                "CH4": 888,
            },
            {
                "timestamp_utc": "2019-09-17T22:00:00.000Z",
                "sector": "DK2",
                "technology": "Wind",
                "amount": 5000,
                "CO2": 999,
                "CH4": 101010,
            },
            {
                "timestamp_utc": "2019-09-17T22:00:00.000Z",
                "sector": "DK2",
                "technology": "Solar",
                "amount": 6000,
                "CO2": 111111,
                "CH4": 121212,
            },
            {
                "timestamp_utc": "2019-09-17T23:00:00.000Z",
                "sector": "DK2",
                "technology": "Wind",
                "amount": 7000,
                "CO2": 131313,
                "CH4": 141414,
            },
            {
                "timestamp_utc": "2019-09-17T23:00:00.000Z",
                "sector": "DK2",
                "technology": "Solar",
                "amount": 8000,
                "CO2": 151515,
                "CH4": 161616,
            },
        ]
    }

    schema = md.class_schema(GetMixEmissionsResponse)
    schema_instance = schema()
    model = schema_instance.load(source_json)

    assert len(model.mix_emissions) == 4
    assert len(model.mix_emissions[0].parts) == 2
    assert len(model.mix_emissions[1].parts) == 2
    assert len(model.mix_emissions[2].parts) == 2
    assert len(model.mix_emissions[3].parts) == 2

    assert model.mix_emissions[0].timestamp_utc == datetime(
        2019, 9, 17, 22, 0, 0, 0, tzinfo=timezone.utc)
    assert model.mix_emissions[0].sector == 'DK1'
    assert model.mix_emissions[0].parts[0].technology == 'Wind'
    assert model.mix_emissions[0].parts[0].amount == 1000
    assert model.mix_emissions[0].parts[0].emissions == {
        'CO2': 111,
        'CH4': 222
    }
    assert model.mix_emissions[0].parts[1].technology == 'Solar'
    assert model.mix_emissions[0].parts[1].amount == 2000
    assert model.mix_emissions[0].parts[1].emissions == {
        'CO2': 333,
        'CH4': 444
    }

    assert model.mix_emissions[1].timestamp_utc == datetime(
        2019, 9, 17, 23, 0, 0, 0, tzinfo=timezone.utc)
    assert model.mix_emissions[1].sector == 'DK1'
    assert model.mix_emissions[1].parts[0].technology == 'Wind'
    assert model.mix_emissions[1].parts[0].amount == 3000
    assert model.mix_emissions[1].parts[0].emissions == {
        'CO2': 555,
        'CH4': 666
    }
    assert model.mix_emissions[1].parts[1].technology == 'Solar'
    assert model.mix_emissions[1].parts[1].amount == 4000
    assert model.mix_emissions[1].parts[1].emissions == {
        'CO2': 777,
        'CH4': 888
    }

    assert model.mix_emissions[2].timestamp_utc == datetime(
        2019, 9, 17, 22, 0, 0, 0, tzinfo=timezone.utc)
    assert model.mix_emissions[2].sector == 'DK2'
    assert model.mix_emissions[2].parts[0].technology == 'Wind'
    assert model.mix_emissions[2].parts[0].amount == 5000
    assert model.mix_emissions[2].parts[0].emissions == {
        'CO2': 999,
        'CH4': 101010
    }
    assert model.mix_emissions[2].parts[1].technology == 'Solar'
    assert model.mix_emissions[2].parts[1].amount == 6000
    assert model.mix_emissions[2].parts[1].emissions == {
        'CO2': 111111,
        'CH4': 121212
    }

    assert model.mix_emissions[3].timestamp_utc == datetime(
        2019, 9, 17, 23, 0, 0, 0, tzinfo=timezone.utc)
    assert model.mix_emissions[3].sector == 'DK2'
    assert model.mix_emissions[3].parts[0].technology == 'Wind'
    assert model.mix_emissions[3].parts[0].amount == 7000
    assert model.mix_emissions[3].parts[0].emissions == {
        'CO2': 131313,
        'CH4': 141414
    }
    assert model.mix_emissions[3].parts[1].technology == 'Solar'
    assert model.mix_emissions[3].parts[1].amount == 8000
    assert model.mix_emissions[3].parts[1].emissions == {
        'CO2': 151515,
        'CH4': 161616
    }
Example #8
        default_factory=lambda: NutidUserExtensionV1(),
        metadata={
            'data_key': SCIMSchema.NUTID_USER_V1.value,
            'required': False
        },
    )


@dataclass(frozen=True)
class UserCreateRequest(User, BaseCreateRequest):
    pass


@dataclass(frozen=True)
class UserUpdateRequest(User, BaseUpdateRequest):
    pass


@dataclass(frozen=True)
class UserResponse(User, BaseResponse):
    pass


NutidExtensionV1Schema = class_schema(NutidUserExtensionV1,
                                      base_schema=BaseSchema)
UserCreateRequestSchema = class_schema(UserCreateRequest,
                                       base_schema=BaseSchema)
UserUpdateRequestSchema = class_schema(UserUpdateRequest,
                                       base_schema=BaseSchema)
UserResponseSchema = class_schema(UserResponse, base_schema=BaseSchema)
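Several examples here pass base_schema= so that every generated schema inherits shared behaviour from a common marshmallow Schema. A minimal, self-contained sketch of that mechanism; ExcludeUnknownSchema and Profile are hypothetical and unrelated to the BaseSchema used above:

from dataclasses import dataclass

import marshmallow
from marshmallow_dataclass import class_schema


class ExcludeUnknownSchema(marshmallow.Schema):
    # Shared base behaviour: silently drop unknown keys on load.
    class Meta:
        unknown = marshmallow.EXCLUDE


@dataclass
class Profile:
    name: str


ProfileSchema = class_schema(Profile, base_schema=ExcludeUnknownSchema)
assert ProfileSchema().load({"name": "x", "unexpected": 1}) == Profile(name="x")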
Example #9
    def test_transfer_ggo_success(self):

        key = BIP32Key.fromEntropy(
            "the_valid_key_that_owns_the_specific_ggo".encode())
        ggo_src = generate_address(AddressPrefix.GGO, key.PublicKey())

        ggo = GGO.get_schema().dumps(
            GGO(origin=
                'meaaaa1c37509b1de4a7f9f1c59e0efc2ed285e7c96c29d5271edd8b4c2714e3c8979c',
                amount=80,
                begin=datetime(2020, 1, 1, 12, tzinfo=timezone.utc),
                end=datetime(2020, 1, 1, 13, tzinfo=timezone.utc),
                tech_type='T12412',
                fuel_type='F010101',
                sector='DK1',
                next=None,
                emissions={
                    "co2": {
                        "value": 1113342.14,
                        "unit": "g/Wh",
                    },
                    "so2": {
                        "value": 9764446,
                        "unit": "g/Wh",
                    },
                })).encode('utf8')

        context = MockContext(states={ggo_src: ggo})

        payload = class_schema(SplitGGORequest)().dumps(
            SplitGGORequest(origin=ggo_src,
                            parts=[
                                SplitGGOPart(address="split1_add", amount=10),
                                SplitGGOPart(address="split2_add", amount=20),
                                SplitGGOPart(address="split3_add", amount=50)
                            ])).encode('utf8')

        transaction = self.create_fake_transaction(
            inputs=[ggo_src, "split1_add", "split2_add", "split3_add"],
            outputs=[ggo_src, "split1_add", "split2_add", "split3_add"],
            payload=payload,
            key=key)

        SplitGGOTransactionHandler().apply(transaction, context)

        self.assertIn(ggo_src, context.states)
        obj = json.loads(context.states[ggo_src].decode('utf8'))
        self.assertEqual(len(obj), 9)

        self.assertEqual(
            obj['origin'],
            'meaaaa1c37509b1de4a7f9f1c59e0efc2ed285e7c96c29d5271edd8b4c2714e3c8979c'
        )
        self.assertEqual(obj['amount'], 80)
        self.assertEqual(obj['begin'], '2020-01-01T12:00:00+00:00')
        self.assertEqual(obj['end'], '2020-01-01T13:00:00+00:00')
        self.assertEqual(obj['sector'], 'DK1')
        self.assertEqual(obj['tech_type'], 'T12412')
        self.assertEqual(obj['fuel_type'], 'F010101')
        self.assertEqual(
            obj['emissions'], {
                "co2": {
                    "value": 1113342.14,
                    "unit": "g/Wh",
                },
                "so2": {
                    "value": 9764446,
                    "unit": "g/Wh",
                },
            })
        self.assertEqual(obj['next']['action'], GGOAction.SPLIT.name)
        self.assertEqual(len(obj['next']['addresses']), 3)
        self.assertEqual(obj['next']['addresses'][0], 'split1_add')
        self.assertEqual(obj['next']['addresses'][1], 'split2_add')
        self.assertEqual(obj['next']['addresses'][2], 'split3_add')

        obj = json.loads(context.states['split1_add'].decode('utf8'))
        self.assertEqual(len(obj), 9)
        self.assertEqual(obj['origin'], ggo_src)
        self.assertEqual(obj['amount'], 10)
        self.assertEqual(obj['begin'], '2020-01-01T12:00:00+00:00')
        self.assertEqual(obj['end'], '2020-01-01T13:00:00+00:00')
        self.assertEqual(obj['sector'], 'DK1')
        self.assertEqual(obj['tech_type'], 'T12412')
        self.assertEqual(obj['fuel_type'], 'F010101')
        self.assertEqual(
            obj['emissions'], {
                "co2": {
                    "value": 1113342.14,
                    "unit": "g/Wh",
                },
                "so2": {
                    "value": 9764446,
                    "unit": "g/Wh",
                },
            })
        self.assertEqual(obj['next'], None)

        obj = json.loads(context.states['split2_add'].decode('utf8'))
        self.assertEqual(len(obj), 9)
        self.assertEqual(obj['origin'], ggo_src)
        self.assertEqual(obj['amount'], 20)
        self.assertEqual(obj['begin'], '2020-01-01T12:00:00+00:00')
        self.assertEqual(obj['end'], '2020-01-01T13:00:00+00:00')
        self.assertEqual(obj['sector'], 'DK1')
        self.assertEqual(obj['tech_type'], 'T12412')
        self.assertEqual(obj['fuel_type'], 'F010101')
        self.assertEqual(
            obj['emissions'], {
                "co2": {
                    "value": 1113342.14,
                    "unit": "g/Wh",
                },
                "so2": {
                    "value": 9764446,
                    "unit": "g/Wh",
                },
            })
        self.assertEqual(obj['next'], None)

        obj = json.loads(context.states['split3_add'].decode('utf8'))
        self.assertEqual(len(obj), 9)
        self.assertEqual(obj['origin'], ggo_src)
        self.assertEqual(obj['amount'], 50)
        self.assertEqual(obj['begin'], '2020-01-01T12:00:00+00:00')
        self.assertEqual(obj['end'], '2020-01-01T13:00:00+00:00')
        self.assertEqual(obj['sector'], 'DK1')
        self.assertEqual(obj['tech_type'], 'T12412')
        self.assertEqual(obj['fuel_type'], 'F010101')
        self.assertEqual(
            obj['emissions'], {
                "co2": {
                    "value": 1113342.14,
                    "unit": "g/Wh",
                },
                "so2": {
                    "value": 9764446,
                    "unit": "g/Wh",
                },
            })
        self.assertEqual(obj['next'], None)
Example #10
"""Gigya schemas."""
import marshmallow_dataclass

from . import models
from renault_api.models import BaseSchema


GigyaResponseSchema = marshmallow_dataclass.class_schema(
    models.GigyaResponse, base_schema=BaseSchema
)()


GigyaLoginResponseSchema = marshmallow_dataclass.class_schema(
    models.GigyaLoginResponse, base_schema=BaseSchema
)()


GigyaGetAccountInfoResponseSchema = marshmallow_dataclass.class_schema(
    models.GigyaGetAccountInfoResponse, base_schema=BaseSchema
)()


GigyaGetJWTResponseSchema = marshmallow_dataclass.class_schema(
    models.GigyaGetJWTResponse, base_schema=BaseSchema
)()
Example #11
    def post(self, userId: int):
        '''Add a new account'''
        schema = marshmallow_dataclass.class_schema(UserAccount)
        acc: UserAccount = schema().load(self.api.payload)
        return app.taxerApi.add_user_account(userId, acc)
Example #12
                                       metadata={'required': False})
    nutid_group_v1: NutidGroupExtensionV1 = field(
        default_factory=lambda: NutidGroupExtensionV1(),
        metadata={
            'data_key': SCIMSchema.NUTID_GROUP_V1.value,
            'required': False
        },
    )


@dataclass(frozen=True)
class GroupCreateRequest(Group, BaseCreateRequest):
    pass


@dataclass(frozen=True)
class GroupUpdateRequest(Group, BaseUpdateRequest):
    pass


@dataclass(frozen=True)
class GroupResponse(Group, BaseResponse):
    pass


GroupCreateRequestSchema = class_schema(GroupCreateRequest,
                                        base_schema=BaseSchema)
GroupUpdateRequestSchema = class_schema(GroupUpdateRequest,
                                        base_schema=BaseSchema)
GroupResponseSchema = class_schema(GroupResponse, base_schema=BaseSchema)
Example #13
    Ggo,
    AccountService,
    AccountServiceError,
)

# Settings
RETRY_DELAY = 10
MAX_RETRIES = (24 * 60 * 60) / RETRY_DELAY
LOCK_TIMEOUT = 60 * 2

# Services / controllers
controller = GgoConsumerController()
account_service = AccountService()

# JSON schemas
ggo_schema = md.class_schema(Ggo)()


def start_handle_ggo_received_pipeline(ggo, user):
    """
    :param Ggo ggo:
    :param User user:
    """
    handle_ggo_received \
        .s(
            subject=user.sub,
            ggo_json=ggo_schema.dump(ggo),
            address=ggo.address,
        ) \
        .apply_async()
Example #14
    end: time = field(metadata={"required": True})
    fare: float = field(metadata={"required": True})


@dataclass
class RidesConfig:
    distance_unit: str = field(metadata={"required": True})
    currency: str = field(metadata={"required": True})
    distance_rate_cost: float = field(metadata={"required": True})
    initial_fare: float = field(metadata={"required": True})
    slot_fares: List[SlotFareConfig] = field(default_factory=list,
                                             metadata={"required": True})


@dataclass
class SlotFare:
    start: time
    fare: float


@dataclass
class Ride:
    id: int
    startTime: datetime
    distance: int
    duration: timedelta


RidesConfigSchema = marshmallow_dataclass.class_schema(RidesConfig)
RideSchema = marshmallow_dataclass.class_schema(Ride)
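A hedged usage sketch for RideSchema above: marshmallow_dataclass maps datetime to a DateTime field and timedelta to a TimeDelta field (integer seconds by default), so a ride record could be loaded like this; the values are illustrative:

ride = RideSchema().load({
    "id": 1,
    "startTime": "2021-05-01T08:30:00",
    "distance": 12,
    "duration": 900,  # deserialized as timedelta(seconds=900)
})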
Example #15
from dataclasses import dataclass, field
from enum import Enum

from marshmallow_dataclass import class_schema


class Command(Enum):
    CREATE = 'create'
    DELETE = 'delete'


@dataclass
class Config:
    bulk_size: int = field(metadata=dict(data_key='main_bulk_size'))


Config.Schema = class_schema(Config)

json_data = {'main_bulk_size': 20}

config = Config.Schema().load(json_data)
print(config)

assert config.bulk_size == 20
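The data_key mapping also applies when serializing; continuing the example above, a dump restores the external key name:

dumped = Config.Schema().dump(config)
assert dumped == {'main_bulk_size': 20}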
Example #16
                           description=func.__doc__.partition('\n')[0],
                           operations=load_yaml_from_docstring(func.__doc__))
            return func

        return wrapped


api = OpenAPISpec(
    title="Template",
    version="0.0.1",
    openapi_version="3.0.2",
    info=dict(description="Leolani component template"),
    plugins=[MarshmallowPlugin()],
)

api.components.schema("ExampleInput", schema=class_schema(ExampleInput))
api.components.schema("ExampleOutput", schema=class_schema(ExampleOutput))


@api.path("/template/api/foo/bar")
def foo_bar(input):
    """Short Description included in OpenAPI spec

    A longer description can go here.

    The yaml snippet below is included in the OpenAPI spec for the endpoint:
    ---
    get:
      operationId: rest.endpoint.foo_bar
      responses:
        '200':
Example #17
    def to_schema(cls):
        return marshmallow_dataclass.class_schema(cls)()
Example #18
    """
    train: Logger = MISSING


@dataclass()
class LogConfigParams:
    """
    Params of logs yaml configuration
    """
    version: int
    formatters: Formatters
    handlers: Handlers
    loggers: Loggers


ConfigSchema = class_schema(LogConfigParams)


def setup_logging(log_yaml: str, log_file_path: str):
    """Setups logging configurations"""
    path = os.path.join(os.getcwd(), CONFIG_DIR, log_yaml)
    with open(path) as config:
        dict_yaml = yaml.safe_load(config)
        dict_yaml["handlers"]["file_handler"]["filename"] = log_file_path
        logging.config.dictConfig(dict_yaml)

    #with open(path, "r") as config:
    #    schema = ConfigSchema()
    #    params = schema.load(yaml.safe_load(config))
    #    params.handlers.file_handler.filename = log_file_path
    #    print(params.__dict__)
Example #19
def getSchema(dataclass):
    return marshmallow_dataclass.class_schema(dataclass)()
Example #20
    def execute_server_query(self, server_query_executor_class_name: str,
                             parameters: list):
        """Calls /execute_server_query API"""
        try:

            action = "execute_server_query"
            dict_params = {
                "server_query_executor_class_name":
                server_query_executor_class_name
            }
            AceQLDebug.debug("dictParams 1: " + str(dict_params))

            server_query_executor_dto_schema = marshmallow_dataclass.class_schema(
                ServerQueryExecutorDto)
            server_query_executor_dto: ServerQueryExecutorDto = ServerQueryExecutorDtoBuilder.build(
                server_query_executor_class_name, parameters)
            json_string: str = server_query_executor_dto_schema().dumps(
                server_query_executor_dto)

            dict_params["server_query_executor_dto"] = json_string
            url_withaction = self.__url + action

            AceQLDebug.debug("url_withaction: " + url_withaction)
            AceQLDebug.debug("parameters    : " + str(parameters))

            self.update_dict_params(dict_params)
            AceQLDebug.debug("dictParams 2: " + str(dict_params))

            # r = requests.post('http://httpbin.org/post', data = {'key':'value'})

            if self.__aceQLHttpApi.get_timeout() is None:
                AceQLDebug.debug("QUERY HERE 1")
                response: requests.Response = requests.post(
                    url_withaction,
                    headers=self.__aceQLHttpApi.get_headers(),
                    data=dict_params,
                    proxies=self.__aceQLHttpApi.get_proxies(),
                    auth=self.__aceQLHttpApi.get_auth())
            else:
                AceQLDebug.debug("QUERY HERE 2")
                response: requests.Response = requests.post(
                    url_withaction,
                    headers=self.__aceQLHttpApi.get_headers(),
                    data=dict_params,
                    proxies=self.__aceQLHttpApi.get_proxies(),
                    auth=self.__aceQLHttpApi.get_auth(),
                    timeout=self.__aceQLHttpApi.get_timeout())
            AceQLDebug.debug("DONE!")
            self.__aceQLHttpApi.set_http_status_code(response.status_code)

            filename = FileUtil.build_result_set_file()
            AceQLDebug.debug("filename1: " + filename)

            # We dump the JSON stream into user.home/.kawansoft/tmp
            with open(filename, 'wb') as fd:
                for chunk in response.iter_content(chunk_size=2048):
                    fd.write(chunk)

            AceQLDebug.debug("after open filename")
            result_set_info = self.treat_result(filename)
            return result_set_info

        except Exception as e:
            if isinstance(e, Error):
                raise
            else:
                raise Error(str(e), 0, e, None,
                            self.__aceQLHttpApi.get_http_status_code())
Example #21
import marshmallow_dataclass
from flask import Blueprint, request
from flask.typing import ResponseReturnValue
from marshmallow import ValidationError
from sqlalchemy.orm import joinedload

log = logging.getLogger(__name__)


@dataclass
class CheckAlias:
    alias: str


CheckAliasSchema = marshmallow_dataclass.class_schema(CheckAlias)


@dataclass
class CommandUpdate:
    data_enabled: Optional[bool]
    data_delay_all: Optional[int]
    data_delay_user: Optional[int]
    data_cost: Optional[int]
    data_can_execute_with_whisper: Optional[bool]
    data_sub_only: Optional[bool]
    data_mod_only: Optional[bool]
    data_action_type: Optional[str]
    data_action_message: Optional[str]
    data_use_global_cd: Optional[bool]
    data_run_through_banphrases: Optional[bool]
Example #22
logger = get_logger(__name__)

graphs = Namespace("graphs", path="/graphs")


@dataclass
class GraphResponse(HALModel):
    """The response after creating or requesting a graph."""

    dashboard_uid: str
    slug: str
    panel_id: int
    embed_url: str


RawInfluxQuerySchema = marshmallow_dataclass.class_schema(RawInfluxDbQuery)
InfluxQuerySchema = marshmallow_dataclass.class_schema(InfluxDbQuery)


class ThresholdSchema(Schema):
    value = fields.Number()
    op = fields.String(default="gt")
    yaxis = fields.String(default="left",
                          validate=validate.OneOf(("left", "right")))
    color_mode = fields.String(default="warning")


class YAxisSchema(Schema):
    format = fields.String(validate=validate.OneOf(YAXIS_TYPES))
    min = fields.Number(default=0, allow_none=True)
    max = fields.Number(default=None, allow_none=True)
Example #23
from dataclasses import dataclass, field
from .split_params import SplittingParams
from .feature_params import FeatureParams
from .train_params import TrainingParams
from marshmallow_dataclass import class_schema
import yaml


@dataclass()
class TrainingPipelineParams:
    input_data_path: str
    output_model_path: str
    metric_path: str
    output_predict_path: str
    splitting_params: SplittingParams
    feature_params: FeatureParams
    train_params: TrainingParams


TrainingPipelineParamsSchema = class_schema(TrainingPipelineParams)


def read_training_pipeline_params(path: str) -> TrainingPipelineParams:
    with open(path, "r") as input_stream:
        schema = TrainingPipelineParamsSchema()
        return schema.load(yaml.safe_load(input_stream))
Example #24
def read_trainer_params(cfg: DictConfig) -> TrainerParams:
    trainer_schema = class_schema(TrainerParams)
    schema = trainer_schema()
    params = schema.load(cfg)
    return params
Example #25
class OnGgoIssuedWebhook(Controller):
    """
    Invoked by DataHubService when new GGO(s) have been issued
    to a specific meteringpoint.
    """
    Request = md.class_schema(OnGgosIssuedWebhookRequest)

    @validate_hmac
    @inject_session
    def handle_request(self, request, session):
        """
        :param OnGgosIssuedWebhookRequest request:
        :param sqlalchemy.orm.Session session:
        :rtype: bool
        """
        user = self.get_user(request.sub, session)

        if user and not self.ggo_exists(request.ggo.address, session):
            ggo = self.create_ggo(user, request.ggo)

            start_invoke_on_ggo_received_tasks(
                subject=request.sub,
                ggo_id=ggo.id,
                session=session,
            )

            return True

        return False

    @atomic
    def create_ggo(self, user, imported_ggo, session):
        """
        :param User user:
        :param origin.services.datahub.Ggo imported_ggo:
        :param sqlalchemy.orm.Session session:
        :rtype: Ggo
        """
        ggo = self.map_imported_ggo(user, imported_ggo)
        session.add(ggo)
        session.flush()
        return ggo

    def map_imported_ggo(self, user, imported_ggo):
        """
        :param User user:
        :param origin.services.datahub.Ggo imported_ggo:
        :rtype: Ggo
        """
        return Ggo(
            user_id=user.id,
            address=imported_ggo.address,
            issue_time=imported_ggo.issue_time,
            expire_time=imported_ggo.expire_time,
            begin=imported_ggo.begin,
            end=imported_ggo.end,
            amount=imported_ggo.amount,
            sector=imported_ggo.sector,
            technology_code=imported_ggo.technology_code,
            fuel_code=imported_ggo.fuel_code,
            emissions=imported_ggo.emissions,
            synchronized=True,
            issued=True,
            stored=True,
            locked=False,
            issue_gsrn=imported_ggo.gsrn,
        )

    def get_user(self, sub, session):
        """
        :param str sub:
        :param sqlalchemy.orm.Session session:
        :rtype: User
        """
        return UserQuery(session) \
            .is_active() \
            .has_sub(sub) \
            .one_or_none()

    def ggo_exists(self, address, session):
        """
        :param str address:
        :param sqlalchemy.orm.Session session:
        """
        count = GgoQuery(session) \
            .has_address(address) \
            .count()

        return count > 0
Example #26
import pajbot.web.utils  # NOQA
from pajbot.managers.redis import RedisManager
from pajbot.streamhelper import StreamHelper

import marshmallow_dataclass
from flask import Blueprint, request
from flask.typing import ResponseReturnValue
from marshmallow import ValidationError


@dataclass
class SocialSet:
    value: str


SocialSetSchema = marshmallow_dataclass.class_schema(SocialSet)


def init(bp: Blueprint) -> None:
    @bp.route("/social/<social_key>/set", methods=["POST"])
    @pajbot.web.utils.requires_level(500)
    def social_set(social_key: str, **options) -> ResponseReturnValue:
        try:
            json_data = request.get_json()
            if not json_data:
                return {"error": "Missing json body"}, 400
            data: SocialSet = SocialSetSchema().load(json_data)
        except ValidationError as err:
            return {"error": f"Did not match schema: {json.dumps(err.messages)}"}, 400

        streamer = StreamHelper.get_streamer()
Example #27
    def test_validator_stacking(self):
        # See: https://github.com/lovasoa/marshmallow_dataclass/issues/91
        class SimpleValidator(Validator):
            # Marshmallow checks for valid validators at construction time only using `callable`
            def __call__(self):
                pass

        validator_a = SimpleValidator()
        validator_b = SimpleValidator()
        validator_c = SimpleValidator()
        validator_d = SimpleValidator()

        CustomTypeOneValidator = NewType(
            "CustomTypeOneValidator", str, validate=validator_a
        )
        CustomTypeNoneValidator = NewType("CustomTypeNoneValidator", str, validate=None)
        CustomTypeMultiValidator = NewType(
            "CustomTypeNoneValidator", str, validate=[validator_a, validator_b]
        )

        @dataclasses.dataclass
        class A:
            data: CustomTypeNoneValidator = dataclasses.field()

        schema_a = class_schema(A)()
        self.assertListEqual(schema_a.fields["data"].validators, [])

        @dataclasses.dataclass
        class B:
            data: CustomTypeNoneValidator = dataclasses.field(
                metadata={"validate": validator_a}
            )

        schema_b = class_schema(B)()
        self.assertListEqual(schema_b.fields["data"].validators, [validator_a])

        @dataclasses.dataclass
        class C:
            data: CustomTypeNoneValidator = dataclasses.field(
                metadata={"validate": [validator_a, validator_b]}
            )

        schema_c = class_schema(C)()
        self.assertListEqual(
            schema_c.fields["data"].validators, [validator_a, validator_b]
        )

        @dataclasses.dataclass
        class D:
            data: CustomTypeOneValidator = dataclasses.field()

        schema_d = class_schema(D)()
        self.assertListEqual(schema_d.fields["data"].validators, [validator_a])

        @dataclasses.dataclass
        class E:
            data: CustomTypeOneValidator = dataclasses.field(
                metadata={"validate": validator_b}
            )

        schema_e = class_schema(E)()
        self.assertListEqual(
            schema_e.fields["data"].validators, [validator_a, validator_b]
        )

        @dataclasses.dataclass
        class F:
            data: CustomTypeOneValidator = dataclasses.field(
                metadata={"validate": [validator_b, validator_c]}
            )

        schema_f = class_schema(F)()
        self.assertListEqual(
            schema_f.fields["data"].validators, [validator_a, validator_b, validator_c]
        )

        @dataclasses.dataclass
        class G:
            data: CustomTypeMultiValidator = dataclasses.field()

        schema_g = class_schema(G)()
        self.assertListEqual(
            schema_g.fields["data"].validators, [validator_a, validator_b]
        )

        @dataclasses.dataclass
        class H:
            data: CustomTypeMultiValidator = dataclasses.field(
                metadata={"validate": validator_c}
            )

        schema_h = class_schema(H)()
        self.assertListEqual(
            schema_h.fields["data"].validators, [validator_a, validator_b, validator_c]
        )

        @dataclasses.dataclass
        class J:
            data: CustomTypeMultiValidator = dataclasses.field(
                metadata={"validate": [validator_c, validator_d]}
            )

        schema_j = class_schema(J)()
        self.assertListEqual(
            schema_j.fields["data"].validators,
            [validator_a, validator_b, validator_c, validator_d],
        )
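A small, self-contained sketch of the same stacking behaviour using real marshmallow validators; ShortName and Item are hypothetical:

import dataclasses

from marshmallow import validate
from marshmallow_dataclass import NewType, class_schema

# One validator attached to the type itself...
ShortName = NewType("ShortName", str, validate=validate.Length(max=10))


@dataclasses.dataclass
class Item:
    # ...and another attached via field metadata; both end up on the field.
    name: ShortName = dataclasses.field(metadata={"validate": validate.Length(min=1)})


schema = class_schema(Item)()
assert len(schema.fields["name"].validators) == 2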
Example #28
    patience: int
    threshold: float
    threshold_mode: str
    cooldown: int
    min_lr: float
    eps: float


@dataclass
class ModelParams:
    net_params: NetParams
    split_ration: SplitRatio
    dataset_path: str
    BATCH_SIZE: int
    CLIP: int
    N_EPOCHS: int
    model_out_name: str
    lr: float
    src_vocab_name: str
    trg_vocab_name: str
    lr_scheduler: LRSchedulerParams


ModelParamsSchema = class_schema(ModelParams)


def read_training_pipeline_params(path: str) -> ModelParams:
    with open(path, "r") as input_stream:
        schema = ModelParamsSchema()
        return schema.load(yaml.safe_load(input_stream))
Example #29
    def get_user_by_id(self, id):
        user = TheLanguage.query.filter_by(id=id).first()
        schema = marshmallow_dataclass.class_schema(TheLanguage)
        result_db = schema().dump(user)
        return result_db
Example #30
    pass

    @validates_schema
    def validate_schema(self, data, **kwargs):
        # Validate that at least one email address was provided if an invite email should be sent
        if data['nutid_invite_v1'].send_email is True and len(
                data['nutid_invite_v1'].emails) == 0:
            raise ValidationError(
                'There must be an email address to be able to send an invite mail.'
            )
        # Validate that there is a primary email address if more than one is requested
        if len(data['nutid_invite_v1'].emails) > 1:
            primary_addresses = [
                email for email in data['nutid_invite_v1'].emails
                if email.primary is True
            ]
            if len(primary_addresses) != 1:
                raise ValidationError(
                    'There must be exactly one primary email address.')


@dataclass(frozen=True)
class InviteResponse(NutidInviteV1, BaseResponse):
    pass


NutidInviteV1Schema = class_schema(NutidInviteV1, base_schema=BaseSchema)
InviteCreateRequestSchema = class_schema(InviteCreateRequest,
                                         base_schema=BaseSchema)
InviteResponseSchema = class_schema(InviteResponse, base_schema=BaseSchema)