Beispiel #1
0
 def test_with_default(self):
     """A Boolean(default=True) field is optional and its schema carries the default."""
     bool_field = fields.Boolean(default=True)
     expected_schema = {'default': True, 'type': 'boolean'}
     assert not bool_field.required
     assert bool_field.__schema__ == expected_schema
Beispiel #2
0
def _check_load_dump_only(field: ma.Field, operation: str) -> bool:
    """Return True when *field* participates in the given *operation*.

    A field is included in a "dump" unless it is load-only, and in a
    "load" unless it is dump-only.  Any other operation name is invalid.
    """
    if operation == "dump":
        return not field.load_only
    if operation == "load":
        return not field.dump_only
    # Unknown operation: fail loudly rather than guessing.
    raise ValueError(
        f"Invalid operation: {operation}. Options are 'load' and 'dump'.")


type_map = {
    ma.AwareDateTime:
    lambda val, api, model_name: fr.Raw(example=val.default),
    ma.Bool:
    lambda val, api, model_name: fr.Boolean(example=val.default),
    ma.Boolean:
    lambda val, api, model_name: fr.Boolean(example=val.default),
    ma.Constant:
    lambda val, api, model_name: fr.Raw(example=val.default),
    ma.Date:
    lambda val, api, model_name: fr.Date(example=val.default),
    ma.DateTime:
    lambda val, api, model_name: fr.DateTime(example=val.default),
    # For some reason, fr.Decimal has no example parameter, so use Float instead
    ma.Decimal:
    lambda val, api, model_name: fr.Float(example=val.default),
    ma.Dict:
    lambda val, api, model_name: fr.Raw(example=val.default),
    ma.Email:
    lambda val, api, model_name: fr.String(example=val.default),
Beispiel #3
0
 def test_defaults(self):
     """A bare Boolean() field is optional and its schema only declares the type."""
     bool_field = fields.Boolean()
     assert bool_field.__schema__ == {'type': 'boolean'}
     assert not bool_field.required
Beispiel #4
0
"""Module responsible for storage of serializers used in Core endpoints."""
from flask_restplus import fields

from medtagger.api import api

# Response schema for the status endpoint: a single success flag.
out__status = api.model('Status model', {
    'success': fields.Boolean(
        description='Should be True if everything is all right.'),
})
Beispiel #5
0
def register(api, cache):
    """Register the 'upgrades' namespace and its resources on *api*.

    Exposes pending and recent rider upgrades (grouped by discipline or
    top-N) built from peewee queries.  Responses are cached via *cache*
    and carry a matching ``Expires`` header.

    NOTE(review): relies on module-level names (Result, Race, Event,
    Person, Points, DISCIPLINE_MAP, cache_timeout, ...) defined outside
    this snippet.
    """
    ns = api.namespace('upgrades', 'Upgrade Status and Events')

    def get_pending(r):
        """Date of the first pending upgrade confirmation, if any (else None)."""
        if isinstance(r.pending, list) and r.pending:
            return r.pending[0].upgrade_confirmation.date

    def make_discipline(event):
        """Map an event to {'name', 'display'}; display drops the '_' suffix
        and title-cases, e.g. 'road_race' -> 'Road'."""
        return {
            'name': event.discipline,
            'display': event.discipline.split('_')[0].title(),
        }

    # Marshalling schema for one result row; points/rank values are read
    # from the prefetched backref lists.
    result = ns.model(
        'Result', {
            'id':
            fields.Integer,
            'place':
            fields.String,
            'time':
            fields.Integer,
            'laps':
            fields.Integer,
            'value':
            fields.Integer(attribute=lambda r: r.points[0].value),
            'sum_value':
            fields.Integer(attribute=lambda r: r.points[0].sum_value),
            'sum_categories':
            fields.List(fields.Integer,
                        attribute=lambda r: r.points[0].sum_categories),
            'notes':
            fields.String(attribute=lambda r: r.points[0].notes),
            'needs_upgrade':
            fields.Boolean(attribute=lambda r: r.points[0].needs_upgrade),
            'rank':
            fields.Integer(
                attribute=lambda r: r.rank[0].value if r.rank else None),
            'pending_date':
            fields.Date(attribute=get_pending),
        })

    person = ns.model(
        'Person', {
            'id':
            fields.Integer,
            'first_name':
            fields.String,
            'last_name':
            fields.String,
            'team_name':
            fields.String,
            # Convenience "First Last" display name (None-safe).
            'name':
            fields.String(attribute=lambda p: '{} {}'.format(
                p.first_name, p.last_name).title() if p else None),
        })

    series = ns.model('Series', {
        'id': fields.Integer,
        'name': fields.String,
    })

    event = ns.model(
        'Event', {
            'id': fields.Integer,
            'name': fields.String,
            'year': fields.Integer,
            'series': fields.Nested(series, allow_null=True),
        })

    race = ns.model(
        'Race', {
            'id':
            fields.Integer,
            'name':
            fields.String,
            'date':
            fields.Date,
            'starters':
            fields.Integer,
            'categories':
            fields.List(fields.Integer),
            'quality':
            fields.Integer(
                attribute=lambda r: r.quality[0].value if r.quality else None)
        })

    discipline = ns.model('Discipline', {
        'name': fields.String,
        'display': fields.String,
    })

    event_with_discipline = ns.clone(
        'EventWithDiscipline', event, {
            'discipline': fields.Nested(discipline, attribute=make_discipline),
        })

    # Race with event info
    race_with_event = ns.clone('RaceWithEvent', race, {
        'event': fields.Nested(event_with_discipline),
    })

    # Results for upgrades - contains person info and race data
    result_with_person_and_race = ns.clone('ResultWithPersonAndRace', result, {
        'race': fields.Nested(race),
        'person': fields.Nested(person),
    })

    result_with_person_and_race_with_event = ns.clone(
        'ResultWithPersonAndRaceWithEvent', result, {
            'race': fields.Nested(race_with_event),
            'person': fields.Nested(person),
        })

    # Container for grouping results by discipline
    discipline_results = ns.model(
        'DisciplineResultsWithRaceAndPerson', {
            'name': fields.String,
            'display': fields.String,
            'results': fields.List(fields.Nested(result_with_person_and_race)),
        })

    @ns.route('/pending/')
    @ns.response(200, 'Success', [discipline_results])
    @ns.response(500, 'Server Error')
    class UpgradesPending(Resource):
        """
        Get pending upgrades, grouped by discipline, sorted by category and points
        """
        @cache.cached(timeout=cache_timeout)
        def get(self):
            # Only consider races from Jan 1 of last year onward.
            cur_year = date.today().year
            start_date = date(cur_year - 1, 1, 1)
            disciplines = []

            for upgrade_discipline in DISCIPLINE_MAP.keys():
                # Subquery to find the most recent result for each person
                latest_cte = (Result.select().join(Race, src=Result).join(
                    Event, src=Race).join(
                        Person,
                        src=Result).where(Race.date >= start_date).where(
                            Race.categories.length() > 0).where(
                                Event.discipline <<
                                DISCIPLINE_MAP[upgrade_discipline]).select(
                                    fn.DISTINCT(
                                        fn.FIRST_VALUE(Result.id).over(
                                            partition_by=[Result.person_id],
                                            order_by=[
                                                Race.date.desc(),
                                                Race.created.desc()
                                            ],
                                            start=Window.preceding())).
                                    alias('result_id')).cte('latest_results'))

                # Unpack the CTE's JSON array of result ids into a
                # selectable list of scalar values (SQLite JSON_EACH).
                latest_results = Select(from_list=[
                    fn.JSON_EACH(
                        latest_cte.select_from(
                            fn.JSON_GROUP_ARRAY(
                                latest_cte.c.result_id).python_value(
                                    str)).scalar())
                ],
                                        columns=[Entity('value')])

                # Main query: each person's newest result that still needs an
                # upgrade.  NOTE: 'Points.needs_upgrade == True' is required by
                # peewee to build SQL -- do not "fix" it to 'is True'.
                query = (Result.select(
                    Result, Race, Event, Person, Points, PendingUpgrade,
                    ObraPersonSnapshot, Rank,
                    Quality).join(Race, src=Result).join(Event, src=Race).join(
                        Person, src=Result).join(Points, src=Result).join(
                            PendingUpgrade,
                            src=Result,
                            join_type=JOIN.LEFT_OUTER).join(
                                ObraPersonSnapshot,
                                src=PendingUpgrade,
                                join_type=JOIN.LEFT_OUTER).join(
                                    Rank,
                                    src=Result,
                                    join_type=JOIN.LEFT_OUTER).join(
                                        Quality,
                                        src=Race,
                                        join_type=JOIN.LEFT_OUTER).
                         where(Result.id << latest_results).where(
                             Points.needs_upgrade == True).order_by(
                                 Points.sum_categories.asc(),
                                 Points.sum_value.desc()))

                disciplines.append({
                    'name':
                    upgrade_discipline,
                    'display':
                    upgrade_discipline.split('_')[0].title(),
                    'results':
                    query.prefetch(Race, Event, Person, Points, PendingUpgrade,
                                   ObraPersonSnapshot, Rank, Quality),
                })

            # (body, status, headers) tuple; Expires matches the cache TTL.
            return ([marshal(d, discipline_results)
                     for d in disciplines], 200, {
                         'Expires':
                         formatdate(timeval=time() + cache_timeout,
                                    usegmt=True)
                     })

    @ns.route('/pending/top/')
    @ns.response(200, 'Success', [result_with_person_and_race_with_event])
    @ns.response(500, 'Server Error')
    class UpgradesPendingTop(Resource):
        """
        Get the top pending upgrades, sorted by category and points
        """
        @cache.cached(timeout=cache_timeout)
        def get(self):
            cur_year = date.today().year
            start_date = date(cur_year - 1, 1, 1)

            # Subquery to find the most recent result for each person
            latest_cte = (Result.select().join(Race, src=Result).join(
                Event, src=Race).join(
                    Person, src=Result).where(Race.date >= start_date).where(
                        Race.categories.length() > 0).select(
                            fn.DISTINCT(
                                fn.FIRST_VALUE(Result.id).over(
                                    partition_by=[Result.person_id],
                                    order_by=[
                                        Race.date.desc(),
                                        Race.created.desc()
                                    ],
                                    start=Window.preceding())).alias(
                                        'result_id')).cte('latest_results'))

            # Same JSON_EACH unpacking trick as in UpgradesPending.
            latest_results = Select(from_list=[
                fn.JSON_EACH(
                    latest_cte.select_from(
                        fn.JSON_GROUP_ARRAY(
                            latest_cte.c.result_id).python_value(
                                str)).scalar())
            ],
                                    columns=[Entity('value')])

            # Top six pending upgrades across all disciplines.
            query = (Result.select(
                Result, Race, Event, Series, Person, Points, Rank,
                Quality).join(Race, src=Result).join(Event, src=Race).join(
                    Series, src=Event, join_type=JOIN.LEFT_OUTER).join(
                        Person, src=Result).join(Points, src=Result).join(
                            Rank, src=Result, join_type=JOIN.LEFT_OUTER).join(
                                Quality, src=Race,
                                join_type=JOIN.LEFT_OUTER).where(
                                    Result.id << latest_results).where(
                                        Points.needs_upgrade == True).order_by(
                                            Points.sum_categories.asc(),
                                            Points.sum_value.desc()).limit(6))

            return ([
                marshal(r, result_with_person_and_race_with_event)
                for r in query.prefetch(Race, Event, Series, Person, Points,
                                        Rank, Quality)
            ], 200, {
                'Expires':
                formatdate(timeval=time() + cache_timeout, usegmt=True)
            })

    @ns.route('/recent/')
    @ns.response(200, 'Success', [discipline_results])
    @ns.response(500, 'Server Error')
    class UpgradesRecent(Resource):
        """
        Get historical upgrades, grouped by discipline, sorted by date, category, name
        """
        @cache.cached(timeout=cache_timeout)
        def get(self):
            cur_year = date.today().year
            start_date = date(cur_year - 1, 1, 1)
            disciplines = []

            for upgrade_discipline in DISCIPLINE_MAP.keys():
                # Recent (down)graded results, excluding junior races.
                # NOTE(review): this filters on 'junior' (lower case) while
                # /recent/top/ filters on 'Junior' -- confirm which casing the
                # data uses; contains() may or may not be case-sensitive here.
                query = (Result.select(
                    Result, Race, Event, Series, Person, Points,
                    PendingUpgrade, ObraPersonSnapshot, Rank,
                    Quality).join(Race, src=Result).join(Event, src=Race).join(
                        Series, src=Event, join_type=JOIN.LEFT_OUTER).join(
                            Person, src=Result).join(Points, src=Result).join(
                                PendingUpgrade,
                                src=Result,
                                join_type=JOIN.LEFT_OUTER).join(
                                    ObraPersonSnapshot,
                                    src=PendingUpgrade,
                                    join_type=JOIN.LEFT_OUTER).join(
                                        Rank,
                                        src=Result,
                                        join_type=JOIN.LEFT_OUTER).join(
                                            Quality,
                                            src=Race,
                                            join_type=JOIN.LEFT_OUTER).where(
                                                Race.date >= start_date).
                         where(~(Race.name.contains('junior'))).where(
                             Event.discipline <<
                             DISCIPLINE_MAP[upgrade_discipline]).where(
                                 Points.notes.contains('upgraded')
                                 | Points.notes.contains('downgraded')
                             ).order_by(Race.date.desc(),
                                        Points.sum_categories.asc(),
                                        Person.last_name.asc(),
                                        Person.first_name.asc()))

                disciplines.append({
                    'name':
                    upgrade_discipline,
                    'display':
                    upgrade_discipline.split('_')[0].title(),
                    'results':
                    query.prefetch(Race, Event, Series, Person, Points,
                                   PendingUpgrade, ObraPersonSnapshot, Rank,
                                   Quality),
                })

            return ([marshal(d, discipline_results)
                     for d in disciplines], 200, {
                         'Expires':
                         formatdate(timeval=time() + cache_timeout,
                                    usegmt=True)
                     })

    @ns.route('/recent/top/')
    @ns.response(200, 'Success', [result_with_person_and_race_with_event])
    @ns.response(500, 'Server Error')
    class UpgradesRecentTop(Resource):
        """
        Get recent historical upgrades, sorted by date, category, name
        """
        @cache.cached(timeout=cache_timeout)
        def get(self):
            cur_year = date.today().year
            start_date = date(cur_year - 1, 1, 1)

            # Six most recent (down)grades, excluding 'Junior' races
            # (note the capitalized spelling here vs. /recent/).
            query = (Result.select(
                Result, Race, Event, Series, Person, Points, Rank,
                Quality).join(Race, src=Result).join(Event, src=Race).join(
                    Series, src=Event,
                    join_type=JOIN.LEFT_OUTER).join(Person, src=Result).join(
                        Points,
                        src=Result).join(Rank,
                                         src=Result,
                                         join_type=JOIN.LEFT_OUTER).join(
                                             Quality,
                                             src=Race,
                                             join_type=JOIN.LEFT_OUTER).where(
                                                 Race.date >= start_date).
                     where(~(Race.name.contains('Junior'))).where(
                         Points.notes.contains('upgraded')
                         | Points.notes.contains('downgraded')).order_by(
                             Race.date.desc(), Points.sum_categories.asc(),
                             Person.last_name.asc(),
                             Person.first_name.asc()).limit(6))

            return ([
                marshal(r, result_with_person_and_race_with_event)
                for r in query.prefetch(Race, Event, Series, Person, Points,
                                        Rank, Quality)
            ], 200, {
                'Expires':
                formatdate(timeval=time() + cache_timeout, usegmt=True)
            })
# Audit timestamps shared by hotel payloads.
a_hotel_timestamp = api.model(
    'Timestamp', {
        'dateCreated': fields.DateTime(),
        'dateUpdated': fields.DateTime(),
    })

# Full hotel representation returned to clients.
a_hotel_details = api.model('Hotel Details', {
    'id': fields.Integer(),
    'name': fields.String(),
    'room': fields.Nested(a_hotel_room),
    'details': fields.String(),
    'checkDates': fields.Nested(a_hotel_check_date),
    'price': fields.Float(),
    'expirationDate': fields.Date(),
    'isExpired': fields.Boolean(),
    'isPackaged': fields.Boolean(),
    'remainingRooms': fields.Integer(),
    'timestamp': fields.Nested(a_hotel_timestamp),
})

a_create_hotel = api.model(
    'Create Hotel', {
        'name': fields.String(),
        'room': fields.Nested(a_hotel_room),
        'details': fields.String(),
        'checkDates': fields.Nested(a_hotel_check_date),
        'price': fields.Float(),
        'expirationDate': fields.Date(),
        'isExpired': fields.Boolean(),
        'isPackaged': fields.Boolean(),
Beispiel #7
0
from flask_restplus import Resource
from flask import request

from flask_restplus import fields, reqparse
from api import api
from model.todomodel import TodoModel

# Request schema for creating a todo item.
todo_post = api.model(
    'Todo post', {
        'text':
        fields.String(required=True, description='description of todo'),
        'completed':
        # Fix: corrected 'complited' typo in the user-facing API description.
        fields.Boolean(required=False, description='todo is completed or not'),
        'completedAt':
        fields.Float(required=False, description='todo completed time')
    })

# Shared request parser for paginated listing endpoints.
pagination_arguments = reqparse.RequestParser()
pagination_arguments.add_argument('page',
                                  type=int,
                                  required=False,
                                  default=1,
                                  help='Page number')
# NOTE(review): reqparse's type=bool treats any non-empty string as True
# (bool('false') is True); flask_restplus.inputs.boolean parses properly.
# The help text 'Page number' also looks copy-pasted from the argument
# above -- confirm what this 'bool' argument is actually meant to control.
pagination_arguments.add_argument('bool',
                                  type=bool,
                                  required=False,
                                  default=1,
                                  help='Page number')
pagination_arguments.add_argument('per_page',
                                  type=int,
                                  required=False,
Beispiel #8
0
from factory.servicefactory import user_service
from factory.servicefactory import rule_service

# Namespace for user endpoints (description text is in Chinese: "about user data").
ns = Namespace("user", description='关于用户数据')

# Query-string parser for login credentials.
# NOTE(review): sending a password via query args leaks it into access
# logs and browser history -- consider moving it to the request body.
login_arg_parser = ns.parser()
login_arg_parser.add_argument('username',
                              type=str,
                              help='用户名',
                              location='args')
login_arg_parser.add_argument('password', type=str, help='密码', location='args')

# Response schema for the login endpoint (model name and descriptions are
# Chinese runtime strings and are left untouched).
login_response = ns.model(
    "登录响应信息", {
        'result': fields.Boolean(description="此用户是否已经验证"),
        'message': fields.String(description="提示信息", default="")
    })


@ns.route('/login')
class LoginApi(Resource):
    # Credential check for GET /login; expects username/password query args.
    @ns.doc("登录")
    @ns.expect(login_arg_parser)
    @ns.response(200, "登录成功", login_response)
    @ns.response(401, "用户密码无效")
    def get(self):
        # Pull the credentials out of the query string.
        args = login_arg_parser.parse_args()
        username = args['username']
        password = args['password']
        # NOTE(review): printing a plaintext password is a security risk;
        # remove before production. The snippet also appears truncated --
        # no response is returned here.
        print(username, password)
Beispiel #9
0
from aedem.utils import dictionarize

from aedem.models import Session
from aedem.models.privileges import Privilege

# Namespace for privilege-management endpoints (description is Portuguese:
# "privilege control operations").
namespace = Namespace('privileges',
                      path="/privileges",
                      description='Operações de controle de privilégios')

# Request schema for creating a privilege (field descriptions in Portuguese).
create_privilege_model = namespace.model("create_privilege", {
    "identifier": fields.String(
        description="Identificador do privilégio", required=True),
    "assignable": fields.Boolean(
        description="Permissão para conceder esse privilégio", required=False),
})


@namespace.route('')
class PrivilegeList(Resource):
    @namespace.doc('list_privileges')
    def get(self):
        '''List all privileges.'''
        session = Session()

        # Serialize every privilege row to a plain dict.
        # NOTE(review): snippet appears truncated -- nothing is returned
        # from the visible body.
        privileges = []
        for privilege in session.query(Privilege).all():
            privileges.append(dictionarize(privilege))
Beispiel #10
0
                       help="Não pode ser em branco."),
        'nmLocalidade':
        fields.String(required=True,
                      description="Nome da localidade",
                      help="Não pode ser em branco."),
        'nrLatitude':
        fields.Integer(required=True,
                       description="Latitude",
                       help="Não pode ser em branco."),
        'nrLongitude':
        fields.Integer(required=True,
                       description="Longitude",
                       help="Não pode ser em branco."),
        'stAtivo':
        fields.Boolean(required=True,
                       description="Status do cliente",
                       help="Não pode ser em branco."),
        'idCliente':
        fields.Integer(required=True,
                       description="Indice do cliente",
                       help="Não pode ser em branco.")
    })
model_produtor = app.model(
    'Produtor Model', {
        'nrDocumento':
        fields.String(required=True,
                      description="Documento do produtor",
                      help="Não pode ser em branco."),
        'nmProdutor':
        fields.String(required=True,
                      description="Indice do país",
Beispiel #11
0
        one_response['garage_name'] = one_garage
        one_response['status_time'] = datetime.datetime.now().strftime(
            '%Y-%m-%d %I:%M:%S %p')

        response.append(one_response)

    return response


# web related logic

# Status report for a single garage door.
GarageStatusModel = api.model('GarageStatusModel', {
    'garage_name': fields.String(),
    'status': fields.String(),
    'error': fields.Boolean(),
    # NOTE(review): allow_null is documented for Nested fields, not String --
    # presumably ignored here; confirm intent.
    'message': fields.String(allow_null=True)
})

# Garage status extended with Nagios plugin reporting fields.
NagiosGarageStatusModel = api.inherit('NagiosGarageStatusModel',
                                      GarageStatusModel, {
                                          'return_code': fields.String(),
                                          'plugin_output': fields.String(),
                                          'status_time': fields.String(),
                                          'service_description': fields.String(),
                                      })

GarageStatusResponseModel = api.model(
    'GarageStatusResponseModel', {
        'status': fields.List(fields.Nested(GarageStatusModel)),
        'type': fields.String(default='STATUS'),
Beispiel #12
0
# -*- coding: utf-8 -*-

from flask_restplus import fields
from .. import api


# Minimal success/failure envelope for auth endpoints.
auth_response = api.model(
    'Auth response', {'success': fields.Boolean(required=True)})
Beispiel #13
0
# Token-verification calls must carry a bearer token in the headers.
parser_verify.add_argument('Authorization',
                           location='headers',
                           required=True,
                           type=str,
                           help='Bearer Access Token')

# Payload for editing an existing user account.
_edituser_req = ns.model('edituser_req', {
    'old_username': fields.String(required=True, description='old username'),
    'new_username': fields.String(required=True, description='username'),
    'new_email': fields.String(required=True, description='email'),
    'is_admin': fields.Boolean(required=True, description='is admin',
                               default=False),
})

# Payload for removing a user account.
_deleteuser_req = ns.model('deleteuser_req', {
    'username': fields.String(required=True, description='username'),
    'email': fields.String(required=True, description='email'),
})

_createuser_req = ns.model(
    'createuser_req', {
        'username':
        fields.String(required=True, description='username'),
        'email':
        fields.String(required=True, description='email'),
        'is_admin':
logger = Logger()

# DTOs

# Namespace grouping all authentication endpoints.
api = Namespace('Auth', description='Authentication-related operations')

# Request payload for LDAP login.
auth_login_ldap_dto = api.model('auth_login_ldap', {
    'username': fields.String(required=True, description='LDAP uid'),
    'password': fields.String(required=True, description='LDAP password'),
})

# Nested payload carrying the issued token and its expiry.
_ldap_response_details = api.model(
    'auth_login_ldap_response_details', {
        'token':
        fields.String,
        'expires_at':
        fields.Integer(description="As unix timestamp in seconds")
    })

# Envelope returned by the LDAP login endpoint.
auth_login_ldap_response_dto = api.model('auth_login_ldap_response', {
    'error': fields.Boolean(description="True on error, false on success"),
    'message': fields.String(description="Some error or success message"),
    'details': fields.Nested(_ldap_response_details, skip_none=True),
})

auth_header_token_dto = api.parser()
auth_header_token_dto.add_argument(
Beispiel #15
0
 def test_value(self, value, expected):
     # Checks Boolean() deserialization: *value* should come out as
     # *expected* (value/expected are presumably supplied by a
     # parametrized test harness -- not visible here).
     self.assert_field(fields.Boolean(), value, expected)
Beispiel #16
0
            return f"Sent {message} to {phone_number}"
        else:
            return f"Failed to send {message} to {phone_number}: {response.body}"


# Namespace for database CRUD commands.
db_ns = api.namespace('db', description='Database commands')

# Payload for adding a new item.
create_item_model = api.model('Create Item Model', {
    'name': fields.String(required=True),
    'price': fields.Float(required=True),
    'url': fields.String(required=True),
})
# Payload for toggling an item's purchased flag
# (registered model name 'Insert Row Model' kept for compatibility).
update_item_model = api.model('Insert Row Model', {
    'name': fields.String(required=True),
    'is_purchased': fields.Boolean(required=True),
})
# Payload for deleting an item by name.
delete_item_model = api.model(
    'Delete Item Model', {'name': fields.String(required=True)})
# Full item representation.
item_model = api.model('Item Model', {
    'name': fields.String(required=True),
    'price': fields.Float(required=True),
    'url': fields.String(required=True),
    'is_purchased': fields.Boolean(required=True),
})


@db_ns.route('/item')
class DB(Resource):
    @db_ns.marshal_list_with(item_model)
Beispiel #17
0
from flask import request
from flask_restplus import Namespace, Resource, fields
from models.project import Project as orm_project
from .utils import PAGINATOR

API = Namespace('projects', description='Project lifecycle and test asset administration')

# Swagger schema for a project resource.
PROJECT = API.model('Project', {
    'id': fields.String(required=False, description='Unique identifier', example='1'),
    'name': fields.String(required=True, description='Project name', example='CRM Project'),
    'code': fields.String(required=False, description='Generic identifier', example='PRJ-001'),
    'description': fields.String(
        required=False,
        description='Details about project content',
        example='Automation suite for release 101-B'),
    # Fix: the example for a Boolean field must be a boolean -- the string
    # 'Y/N' renders as an invalid (non-boolean) example in the OpenAPI spec.
    'active': fields.Boolean(required=False, description='In archive?', example=True)
})


# pylint: disable=no-self-use
@API.route('')
class ProjectsList(Resource):
    """Endpoint for project listings results"""

    @API.marshal_list_with(PROJECT)
    @API.response(200, 'Project found')
    @API.expect(PAGINATOR)
    def get(self):
        """
        List all projects
Beispiel #18
0
from _orchest.internals import config as _config

# Namespace: Sessions
# Schema for a running server process as reported by the sessions API.
server = Model('Server', {
    'url': fields.String(required=True, description='URL of the server'),
    'hostname': fields.String(required=True, default='localhost',
                              description='Hostname'),
    'port': fields.Integer(required=True, default=8888,
                           description='Port to access the server'),
    'secure': fields.Boolean(required=True,
                             description='Any extra security measures'),
    'base_url': fields.String(required=True, default='/',
                              description='Base URL'),
    'token': fields.String(required=True,
                           description='Token for authentication'),
    'notebook_dir': fields.String(required=True,
                                  default=_config.PIPELINE_DIR,
                                  description='Working directory'),
    'password': fields.Boolean(required=True,
                               description='Password if one is set'),
    'pid': fields.Integer(required=True, description='PID'),
})

session = Model(
Beispiel #19
0
          authorizations=authorizations
         )

# Define the model so that the docs reflect what can be sent.
# Customer payload, including the nested Address sub-model.
customer_model = api.model('Customer', {
    'customer_id': fields.String(
        required=True,
        description='The unique customer id generated by the system'),
    'user_id': fields.String(
        required=True,
        description='The unique id given by customer'),
    'first_name': fields.String(
        required=True,
        description='The first name of the Customer'),
    'last_name': fields.String(
        required=True,
        description='The last name of Customer (e.g., Wang, Gates, etc.)'),
    'password': fields.String(required=True, description='Password'),
    'active': fields.Boolean(required=True, description='Active status'),
    'address': fields.Nested(
        api.model('Address', {
            'id': fields.String(
                required=True,
                description='The unique address id generated by the system'),
            'street': fields.String(required=True, description='Street'),
            'apartment': fields.String(required=True, description='Apartment'),
            'city': fields.String(required=True, description='City'),
            'state': fields.String(required=True, description='State'),
            'zip_code': fields.String(required=True, description='Zip code')
        }),
        description='Address of the customer')
})

create_model = api.model('Customer', {
    'user_id': fields.String(required=True,
Beispiel #20
0
    def create_app(self) -> Flask:
        """
        Initialise the app and the api object and register all endpoints.

        This method also defines the swagger documentation and the api model
        structure, wires up the background solver process pool, installs a
        SIGINT handler for graceful shutdown, and resumes any jobs that were
        left unfinished by a previous run.

        :returns:
            the app object
        """

        # check if lingeling is present
        # Fail fast at startup rather than on the first solve request.
        try:
            subprocess.check_output(["lingeling", "--version"])
        except Exception as e:
            raise Exception(
                "The SAT solver binary could not be called. "
                "Please make sure that lingeling is build and present in the path."
            ) from e

        app = Flask(__name__)
        # In debug mode, wrap the WSGI app in a profiler that reports the 30
        # slowest function calls per request.
        if app.config['DEBUG']:
            app.config['PROFILE'] = True
            from werkzeug.middleware.profiler import ProfilerMiddleware
            app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
        CORS(app)

        # This lets the child processes ignore the SIG int signal handler.
        original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)

        # Worker pool for the CPU-bound SAT solving; half the cores are used.
        pool: ProcessPool = ProcessPool(
            max_workers=int(multiprocessing.cpu_count() / 2))

        app.config['RESTPLUS_VALIDATE'] = True

        # Persistent storage for embeddings and job state.
        data_store = DataStore(self.data_path)

        api = Api(
            app,
            version='1.0',
            title='Linear layout API',
            description=
            'Through this API one can request for a linear layout of a graph in graphml format. \n'
            'The actual computation of the linear layout is done using SAT solving. '
            'The instances are solved using [lingeling](http://fmv.jku.at/lingeling/)\n'
            'See https://github.com/linear-layouts/SAT for more information')

        #: The schema definition of a page
        page_schema = api.model(
            'Page', {
                'id':
                fields.String(required=True,
                              description='The id of this page',
                              example="P1"),
                'type':
                fields.String(description='The type of the page. \n'
                              'NONE allows all patterns',
                              enum=['QUEUE', 'STACK', 'NONE'],
                              required=True),
                'constraint':
                fields.String(
                    description='Additional constraints for the page',
                    enum=['NONE', 'DISPERSIBLE', 'TREE', 'FOREST'])
            })

        #: The schema definition of a constraint
        constraint_schema = api.model(
            'Constraint',
            {
                'type':
                fields.String(
                    description="""
                                      EDGES_ON_PAGES: assigns edges to specific pages. The edges are encoded 
                                                      independently from each other
                                      arguments: edge ids
                                      modifier: page ids to assign the edges to (OR joined)
                                      
                                      EDGES_SAME_PAGES: assigns edges to the same page. Only implemented up to to 
                                                        four pages
                                      arguments: the edge ids
                                      modifier: none
                                      
                                      EDGES_DIFFERENT_PAGES: all edges have to be on different pages. Only works up to 
                                                             as many edges as there are pages
                                      arguments: the edge ids
                                      modifier none
                                      
                                      NOT_ALL_IN_SAME_PAGE: not all edges can be on the same page. Only works when at least  
                                                            two pages are available
                                      arguments: the edge ids
                                      modifier none
                                      
                                      EDGES_TO_SUB_ARC_ON_PAGES: If any node shares an edge with the nodes named in 
                                                                 arguments and is between the two nodes, then this edge 
                                                                 is restricted to the pages named in modifier.
                                      arguments: the two vertexes to restrict the edges from
                                      modifier: the pages to restrict the edges to
                                      
                                      EDGES_FROM_NODES_ON_PAGES: All edges involving the nodes have to be on the given page. 
                                      arguments: the vertices to restrict the edges from
                                      modifier: the pages to restrict the edges to
                                      
                                      NODES_PREDECESSOR: one set of nodes are before another set of nodes
                                      arguments: the node ids to be before 
                                      modifier: the node ids to be after
                                      
                                      NODES_ABSOLUTE_ORDER: deprecated. see NODES_REQUIRE_ABSOLUTE_ORDER
                                      
                                      NODES_REQUIRE_ABSOLUTE_ORDER: The given nodes have to be in exactly the given 
                                                                    order and no nodes are allowed in between.
                                      arguments: the nodes in the required order
                                      modifier: none 
                                      
                                      NODES_REQUIRE_PARTIAL_ORDER: The given nodes have to be the given relative order
                                      arguments: the nodes in the order
                                      modifier: none 
                                      
                                      NODES_FORBID_PARTIAL_ORDER: The given nodes have to be NOT the given relative order. 
                                                                    Two nodes flipped already satisfy this constraint.
                                      arguments: the nodes in the forbidden order
                                      modifier: none 
                                      
                                      NODES_CONSECUTIVE: The given two nodes have to be next to each other in any order. 
                                                         Currently only implemented for 2 Nodes
                                      arguments: the two nodes to be made consecutive
                                      modifier: none 
                                      
                                      NODES_SET_FIRST: The given node has to be the first in any order.
                                      arguments: the node to be the first
                                      modifier: none  
                                      """,
                    enum=[
                        "EDGES_ON_PAGES", "EDGES_SAME_PAGES",
                        "EDGES_DIFFERENT_PAGES", "NOT_ALL_IN_SAME_PAGE",
                        "EDGES_TO_SUB_ARC_ON_PAGES",
                        "EDGES_FROM_NODES_ON_PAGES", "NODES_PREDECESSOR",
                        "NODES_ABSOLUTE_ORDER", "NODES_REQUIRE_ABSOLUTE_ORDER",
                        "NODES_REQUIRE_PARTIAL_ORDER",
                        "NODES_FORBID_PARTIAL_ORDER", "NODES_CONSECUTIVE",
                        "NODES_SET_FIRST"
                    ],
                    example="NODES_PREDECESSOR",
                    required=True),
                'arguments':
                fields.List(
                    fields.String,
                    min_items=1,
                    required=True,
                    description=
                    'The ids of the elements affected by this constraint',
                    example=["1"]),
                'modifier':
                fields.List(fields.String,
                            description='The ids of the constraint modifier.',
                            example=["0"]),
            },
        )
        #: The schema of one edge-to-page assignment in a solved layout.
        assigment_schema = api.model(
            'Assigment', {
                'edge':
                fields.String(description='The id of the edge', required=True),
                'page':
                fields.String(
                    description='The id of the page the edge is assigned to',
                    required=True)
            })
        #: The schema used for all error responses.
        error_schema = api.model(
            'Error', {
                'message':
                fields.String(description='The error message',
                              required=True,
                              readonly=True)
            })

        # the schema definition for the full linear layout
        linear_layout_schema = api.model(
            'Linear layout', {
                'id':
                fields.Integer(description='The id of the embedding',
                               readonly=True),
                'graph':
                fields.String(
                    description=
                    'This field contains a graphml definition encoded with base64. '
                    'The example value is K3.',
                    required=True,
                    example=self._k3_str_as_example),
                'pages':
                fields.List(fields.Nested(page_schema),
                            min_items=1,
                            required=True,
                            unique=True),
                'constraints':
                fields.List(fields.Nested(constraint_schema)),
                'status':
                fields.String(
                    description=
                    'The current processing status of the computation',
                    enum=['IN_PROGRESS', 'FINISHED', 'FAILED'],
                    readonly=True),
                'assignments':
                fields.List(fields.Nested(assigment_schema),
                            readonly=True,
                            description='A list of edge to page assignments'),
                'vertex_order':
                fields.List(
                    fields.String,
                    readonly=True,
                    description=
                    'The order in which the vertices have to be placed on the spine.'
                ),
                'satisfiable':
                fields.Boolean(
                    readonly=True,
                    description=
                    'On finished instances this field indicates if the given '
                    'problem is satisfiable'),
                'rawSolverResult':
                fields.String(
                    readonly=True,
                    description=
                    'This field contains the comment lines of the solver which '
                    'provides some data on the solved SAT instance'),
                'message':
                fields.String(
                    readonly=True,
                    description=
                    "This field contains currently the error message from "
                    "the background processing"),
                'created':
                fields.DateTime(
                    readonly=True,
                    description='A timestamp when this instance was created'),
                'finished':
                fields.DateTime(
                    readonly=True,
                    description='A timestamp when this instance was solved'),
            })

        # Query-string pagination arguments shared by the list endpoint.
        list_parser = api.parser()
        list_parser.add_argument('limit',
                                 type=int,
                                 help='How many objects should be returned',
                                 location='query',
                                 default=20)
        list_parser.add_argument('offset',
                                 type=int,
                                 help='Where to start counting',
                                 location='query',
                                 default=0)

        @api.route('/embeddings')
        class EmbeddingList(Resource):
            @api.doc('list_embeddings')
            @api.response(code=200,
                          description="Success",
                          model=[linear_layout_schema])
            @api.response(code=500,
                          description="Server Error",
                          model=error_schema)
            @api.expect(list_parser)
            def get(self):
                """
                List all embeddings
                """

                # Re-validate the pagination arguments by hand; the parser
                # only supplies defaults and type conversion.
                limit = int(request.args.get('limit', 20))
                if (limit < 1) or (limit > 50):
                    abort(400, "limit has to be in range [1,50]")
                offset = int(request.args.get('offset', 0))
                if offset < 0:
                    abort(400, "offset has to be not negative")

                return jsonify(data_store.get_all(limit=limit, offset=offset))

            @api.doc('create_embedding')
            @api.expect(linear_layout_schema)
            @api.param('async',
                       'Should the processing be handled asynchronous',
                       location="query",
                       default=False,
                       type=bool)
            @api.response(code=200,
                          description="Success",
                          model=linear_layout_schema)
            @api.response(code=500,
                          description="Server Error",
                          model=error_schema)
            @api.response(code=501,
                          description="Not Implemented",
                          model=error_schema)
            @api.response(code=400,
                          description="Bad Request",
                          model=error_schema)
            def post(self):
                """
                Create a new embedding
                """
                entity = request.get_json()

                # looks weird but is the only reliable way to find out if a string value is a true boolean ;-)
                # see https://stackoverflow.com/questions/715417/converting-from-a-string-to-boolean-in-python
                handle_async = request.args.get('async', "",
                                                type=str).lower() in yes_list
                try:
                    entity['created'] = datetime.datetime.now(
                        datetime.timezone.utc).isoformat()

                    # Decode and parse the submitted graphml; any failure is
                    # the client's fault, hence a 400.
                    b64_graph_str = entity.get('graph')
                    try:
                        graph_str = base64.b64decode(b64_graph_str)
                        node_ids, edges = get_nodes_and_edges_from_graph(
                            graph_str)
                    except Exception as e:
                        app.logger.exception(e)
                        raise BadRequest(
                            "The graph string has to be a base64 encoded graphml string! "
                            "The exact error was: " + str(e))

                    len_nodes = len(node_ids)
                    len_edges = len(edges)

                    # NOTE(review): the enforced limits (600 nodes / 1900
                    # edges) differ from the numbers quoted in the message
                    # (300 / 900) — confirm which values are intended.
                    if len_edges > 1900 or len_nodes > 600:
                        raise BadRequest(
                            "For fairness reasons this API will only handle graphs with less than 300 vertices and 900 "
                            "edges. Your graph has {} vertices and {} edges which exceed the limit."
                            "".format(len_nodes, len_edges))

                    # Self-loops are not supported by the solver encoding.
                    for e in edges:
                        if e.source == e.target:
                            raise BadRequest(
                                "The Implementation only supports graphs where "
                                "every edge has two distinct start and end nodes"
                            )

                    # ignore double edges
                    # # validate for no double edges
                    # all_edge_endpoints = [{e.source, e.target} for e in edges]
                    # duplicate_edges = get_duplicates(all_edge_endpoints)
                    # if len(duplicate_edges) > 0:
                    #     abort(400,
                    #           "Multiedges are not allowed. "
                    #           "The following edges were recognized as duplicate {}".format(duplicate_edges))

                    # validate for unique edge ids
                    duplicate_edge_ids = get_duplicates([e.id for e in edges])
                    if len(duplicate_edge_ids) > 0:
                        abort(
                            400, "Edge ids have to be unique"
                            "The following ids were recognized as duplicate {}"
                            .format(duplicate_edge_ids))

                    # validate page id uniqueness
                    page_ids = [p['id'] for p in entity.get('pages')]
                    duplicate_page_ids = get_duplicates(page_ids)
                    if len(duplicate_page_ids) > 0:
                        abort(
                            400, "Duplicated page ids are not allowed. "
                            "The following id were recognized as duplicate {}".
                            format(duplicate_page_ids))

                    # Persist the entity before solving so that the job can be
                    # tracked (and resumed) by its id.
                    entity['status'] = 'IN_PROGRESS'
                    entity = data_store.insert_new_element(entity)

                    # validate graph not empty
                    if len(page_ids) == 0 or len_edges == 0 or len_nodes == 0:
                        abort(
                            400,
                            "Please submit a graph with at least one node, edge and page"
                        )

                    if handle_async:
                        # abort(501, "Async handling is not enabled.")
                        # Schedule the solve on the process pool and return
                        # immediately; the callback stores the result.
                        future_result: ProcessFuture = pool.schedule(
                            SolverInterface.solve,
                            (node_ids, edges, entity.get('pages'),
                             entity.get('constraints'), entity['id']))
                        future_result.add_done_callback(
                            processing_finished_callback)

                        future_result.done()
                        # remove old futures
                        remove_old_jobs()
                        jobs.append(QueueItem(entity.get('id'), future_result))

                    else:
                        # Synchronous path: solve in-process and return the
                        # finished entity (or the stored failure state).
                        try:
                            entity = handle_solver_result(
                                SolverInterface.solve(
                                    node_ids, edges, entity.get('pages'),
                                    entity.get('constraints'), entity['id']))
                        except Exception as e1:
                            error_callback(e1)
                            entity = data_store.get_by_id(entity['id'])

                    return jsonify(entity)
                except HTTPException as e:
                    raise e
                except Exception as e:
                    raise InternalServerError(
                        "The error {} \noccured from this body \n{}".format(
                            str(e), request.get_data(as_text=True))) from e

        @api.route('/embeddings/<string:id>')
        @api.response(404, 'Embedding not found', model=error_schema)
        @api.param('id', 'The task identifier')
        class SingleEmbedding(Resource):
            @api.doc('get_embedding')
            @api.response(code=200,
                          description="Success",
                          model=linear_layout_schema)
            def get(self, id):
                """
                Get an embedding by id
                """
                element = data_store.get_by_id(id)
                if not element:
                    raise NotFound(
                        "The given id {} was not present in the data store".
                        format(id))
                else:
                    return jsonify(element)

            @api.doc('delete_embedding')
            @api.response(code=200,
                          description="Success",
                          model=linear_layout_schema)
            def delete(self, id):
                """
                Cancel the computation for the given id.
                """
                element = data_store.get_by_id(id)
                if not element:
                    raise NotFound(
                        "The given id {} was not present in the data store".
                        format(id))
                # Only cancel if the job is still queued/running here.
                j_tmp = [j for j in jobs if str(j.id) == str(id)]
                if len(j_tmp) == 1:
                    j_tmp[0].future.cancel()
                    element['status'] = 'FAILED'
                    element['message'] = 'The job was cancelled by user'
                    data_store.update_entry(id, element)
                    jobs.remove(j_tmp[0])
                return jsonify(element)

        # Invoked by the pool when a scheduled solve finishes (or fails).
        def processing_finished_callback(res: ProcessFuture):
            if not res.done() or res.cancelled():
                pass
            else:
                exception = res.exception()
                if exception is not None:
                    error_callback(exception)
                else:
                    result = res.result()
                    handle_solver_result(result)

        # Marks the owning entity as FAILED when the solver raised an
        # IdRelatedException; re-raises anything it cannot attribute.
        def error_callback(e_param: BaseException):
            try:
                raise e_param
            except IdRelatedException as e:
                id = e.entity_id
                entity = data_store.get_by_id(id)
                if not entity:
                    raise e
                entity['status'] = 'FAILED'
                entity['message'] = e.message

                data_store.update_entry(id, entity)
                if type(e.cause) is HTTPException:
                    raise e.cause
                else:
                    raise e

        # Stores a successful solver result back into the data store and
        # returns the updated entity.
        def handle_solver_result(result: SolverResult):
            entity = data_store.get_by_id(result.entity_id)
            if not entity:
                raise Exception(
                    "The given id {} was not found in the data store".format(
                        result.entity_id))
            entity['status'] = 'FINISHED'
            entity['satisfiable'] = result.satisfiable
            entity['assignments'] = result.page_assignments
            entity['vertex_order'] = result.vertex_order
            entity['rawSolverResult'] = result.solver_output
            entity['finished'] = datetime.datetime.now(
                datetime.timezone.utc).isoformat()
            entity = data_store.update_entry(result.entity_id, entity)
            print(
                "Finished job with id {} in {} s. "
                "Including waiting time in the queue".format(
                    entity['id'],
                    str(parse(entity['finished']) - parse(entity['created']))))
            return entity

        # Graceful shutdown: flush the data store, drop stale futures, stop
        # the pool, then chain to the original SIGINT handler.
        def signal_handler(sig, frame):
            data_store.prepare_shutdown()
            remove_old_jobs()
            print(
                "Shutdown request. "
                "Currently {} Jobs are in queue and will be processed on server start."
                .format(len(jobs)))
            try:
                pool.stop()
                pool.join(timeout=2)
            finally:
                original_sigint_handler()

        signal.signal(signal.SIGINT, signal_handler)

        # Re-schedule jobs that were still IN_PROGRESS when the server last
        # stopped, so no submitted work is lost across restarts.
        unfinished_jobs = data_store.get_unfinished_jobs()
        if len(unfinished_jobs) > 0:
            print("Resuming {} unfinished jobs".format(len(unfinished_jobs)))
        for job in unfinished_jobs:
            b64_graph_str = job.get('graph')
            graph_str = base64.b64decode(b64_graph_str)
            node_ids, edges = get_nodes_and_edges_from_graph(graph_str)

            future: ProcessFuture = pool.schedule(
                SolverInterface.solve, (node_ids, edges, job.get('pages'),
                                        job.get('constraints'), job['id']))
            future.add_done_callback(processing_finished_callback)
            jobs.append(QueueItem(job.get('id'), future))

        return app
Beispiel #21
0
# Minimal user payload embedded in a private-message record.
come_out_message_user = api.model('come_out_message_user', {
    'id': fields.Integer(description="id"),
    'username': fields.String(description="用户名"),
    'nickname': fields.String(description="用户昵称"),
    'img_url': fields.String(description="头像地址")
})

# One message: sender, recipient, body, read flag and timestamp.
come_out_message = api.model('come_out_message', {
    'user_from': fields.Nested(model=come_out_message_user,
                               description="发信人"),
    'user_to': fields.Nested(model=come_out_message_user,
                             description="收信人"),
    'contant': fields.String(description="内容数据"),
    'checked': fields.Boolean(description="是否已读"),
    'last_login_time': fields.DateTime(description="时间")
})

# Success envelope wrapping a list of messages.
come_out_message_success = api.model('come_out_message_success', {
    'code': fields.Integer(description="状态码"),
    'success': fields.Boolean(description="是否成功"),
    'data': fields.Nested(model=come_out_message, as_list=True,
                          description="内容数据")
})


@api.errorhandler(ProfileError)
Beispiel #22
0
    location="args",
    type=str,
    help="Campo utilizado para ordernar a lista de resultados",
    choices=["name", "created_at"],
)
def _parse_query_bool(value):
    """Parse a query-string boolean robustly.

    Fix: ``type=bool`` is a well-known reqparse pitfall — query values arrive
    as strings, and ``bool('false')`` is ``True`` because every non-empty
    string is truthy, so clients could never turn ascending ordering off.
    Accepts real booleans plus the usual textual spellings.
    """
    if isinstance(value, bool):
        return value
    return str(value).strip().lower() in ("true", "1", "yes", "y", "t")


get_list_model.add_argument("order_ascending",
                            default=True,
                            required=False,
                            location="args",
                            type=_parse_query_bool,
                            help="Ordernar os resultados de forma ascendente")

##### LIST MODELS
# Raw field spec for the listing response; wrapped into the shared response
# envelope by make_response_model below.
list_datafile_response_model = {
    "total": fields.Integer(
        required=True,
        description=
        "Corresponde ao numero total de documentos que o usuário possui")
}
list_datafile_response_model = make_response_model(list_datafile_response_model)

##### DELETE MODELS
delete_response_model = {
    "deleted": fields.Boolean(
        required=True,
        description="Confirma se o arquivo foi excluido ou não")
}
delete_response_model = make_response_model(delete_response_model)