コード例 #1
0
def test_system_info():
    """Check analytics._system_info() against a schema built for the host OS."""
    # Base contract: every platform must at least report its OS family.
    schema = Schema({"os": Any("windows", "mac", "linux")}, required=True)

    current_os = platform.system()

    # Layer platform-specific keys on top of the base schema.
    if current_os == "Windows":
        schema = schema.extend({
            "windows_version_build": int,
            "windows_version_major": int,
            "windows_version_minor": int,
            "windows_version_service_pack": str,
        })
    elif current_os == "Darwin":
        schema = schema.extend({"mac_version": str})
    elif current_os == "Linux":
        schema = schema.extend({
            "linux_distro": str,
            "linux_distro_like": str,
            "linux_distro_version": str,
        })

    assert schema(analytics._system_info())
コード例 #2
0
ファイル: tests.py プロジェクト: spacegaier/voluptuous
def test_subschema_extension():
    """Verify that Schema.extend adds and replaces keys in a subschema"""
    original = Schema({'a': {'b': int, 'c': float}})
    overlay = {'d': str, 'a': {'b': str, 'e': int}}
    combined = original.extend(overlay)

    # extend() must not mutate either of its inputs.
    assert_equal(original.schema, {'a': {'b': int, 'c': float}})
    assert_equal(overlay, {'d': str, 'a': {'b': str, 'e': int}})
    # Subschema 'a' is merged key-by-key: 'b' replaced, 'c' kept, 'e' added.
    assert_equal(combined.schema, {'a': {'b': str, 'c': float, 'e': int}, 'd': str})
コード例 #3
0
ファイル: tests.py プロジェクト: spacegaier/voluptuous
def test_schema_extend_overrides():
    """Verify that Schema.extend can override required/extra parameters."""
    original = Schema({'a': int}, required=True)
    derived = original.extend({'b': str}, required=False, extra=ALLOW_EXTRA)

    # The source schema keeps its original settings...
    assert original.required is True
    assert original.extra == PREVENT_EXTRA
    # ...while the derived schema takes the overrides.
    assert derived.required is False
    assert derived.extra == ALLOW_EXTRA
コード例 #4
0
ファイル: tests.py プロジェクト: spacegaier/voluptuous
def test_schema_extend_key_swap():
    """Verify that Schema.extend can replace keys, even when different markers are used"""
    base = Schema({Optional('a'): int})
    extension = {Required('a'): int}
    extended = base.extend(extension)

    assert_equal(len(base.schema), 1)
    assert_true(isinstance(list(base.schema)[0], Optional))
    assert_equal(len(extended.schema), 1)
    # BUG FIX: the original asserted a non-empty tuple — always truthy, so the
    # check could never fail.  The marker type must actually be verified.
    assert_true(isinstance(list(extended.schema)[0], Required))
コード例 #5
0
ファイル: tests.py プロジェクト: tuukkamustonen/voluptuous
def test_subschema_extension():
    """Verify that Schema.extend adds and replaces keys in a subschema"""

    parent = Schema({'a': {'b': int, 'c': float}})
    delta = {'d': str, 'a': {'b': str, 'e': int}}
    result = parent.extend(delta)

    # Neither input may be mutated by extend().
    assert_equal(parent.schema, {'a': {'b': int, 'c': float}})
    assert_equal(delta, {'d': str, 'a': {'b': str, 'e': int}})
    # The nested dict under 'a' is merged rather than replaced wholesale.
    assert_equal(result.schema, {'a': {'b': str, 'c': float, 'e': int}, 'd': str})
コード例 #6
0
ファイル: tests.py プロジェクト: jimmyngo/voluptuous
def test_schema_extend_overrides():
    """Verify that Schema.extend can override required/extra parameters."""

    base = Schema({'a': int}, required=True)
    extended = base.extend({'b': str}, required=False, extra=voluptuous.ALLOW_EXTRA)

    # Idiom fix: compare against the True/False singletons with `is`
    # rather than `== True` / `== False` (PEP 8).
    assert base.required is True
    assert base.extra == voluptuous.PREVENT_EXTRA
    assert extended.required is False
    assert extended.extra == voluptuous.ALLOW_EXTRA
コード例 #7
0
ファイル: tests.py プロジェクト: tuukkamustonen/voluptuous
def test_schema_extend_key_swap():
    """Verify that Schema.extend can replace keys, even when different markers are used"""

    base = Schema({Optional('a'): int})
    extension = {Required('a'): int}
    extended = base.extend(extension)

    assert_equal(len(base.schema), 1)
    assert_true(isinstance(list(base.schema)[0], Optional))
    assert_equal(len(extended.schema), 1)
    # BUG FIX: the original passed a 2-tuple to assert_true — a non-empty
    # tuple is always truthy, so the marker type was never actually checked.
    assert_true(isinstance(list(extended.schema)[0], Required))
コード例 #8
0
def test_schema_extend():
    """Verify that Schema.extend copies schema keys from both."""

    base_schema = Schema({'a': int}, required=True)
    extension_dict = {'b': str}
    result = base_schema.extend(extension_dict)

    # Inputs are left untouched by extend().
    assert base_schema.schema == {'a': int}
    assert extension_dict == {'b': str}
    # The result unions the keys and inherits the base's settings.
    assert result.schema == {'a': int, 'b': str}
    assert result.required == base_schema.required
    assert result.extra == base_schema.extra
コード例 #9
0
ファイル: tests.py プロジェクト: jimmyngo/voluptuous
def test_schema_extend():
    """Verify that Schema.extend copies schema keys from both."""

    original = Schema({'a': int}, required=True)
    extra_keys = {'b': str}
    combined = original.extend(extra_keys)

    # extend() must not mutate either operand.
    assert original.schema == {'a': int}
    assert extra_keys == {'b': str}
    # Keys from both sides appear in the result; settings carry over.
    assert combined.schema == {'a': int, 'b': str}
    assert combined.required == original.required
    assert combined.extra == original.extra
コード例 #10
0
# Bulbs share the plug sysinfo contract plus lighting-specific state.
BULB_SCHEMA = PLUG_SCHEMA.extend({
    "ctrl_protocols": Optional(dict),
    "description": Optional(str),  # TODO: LBxxx similar to dev_name
    "dev_state": lb_dev_state,
    "disco_ver": str,
    "heapsize": int,
    "is_color": check_int_bool,
    "is_dimmable": check_int_bool,
    "is_factory": bool,
    "is_variable_color_temp": check_int_bool,
    # Current light output; dft_on_state holds the power-on defaults.
    "light_state": {
        "brightness": All(int, Range(min=0, max=100)),
        "color_temp": int,
        "hue": All(int, Range(min=0, max=255)),
        "mode": str,
        "on_off": check_int_bool,
        "saturation": All(int, Range(min=0, max=255)),
        "dft_on_state": Optional({
            "brightness": All(int, Range(min=0, max=100)),
            "color_temp": All(int, Range(min=2000, max=9000)),
            "hue": All(int, Range(min=0, max=255)),
            "mode": str,
            "saturation": All(int, Range(min=0, max=255)),
        }),
        "err_code": int,
    },
    # User-defined light presets, one dict per preset index.
    "preferred_state": [{
        "brightness": All(int, Range(min=0, max=100)),
        "color_temp": int,
        "hue": All(int, Range(min=0, max=255)),
        "index": int,
        "saturation": All(int, Range(min=0, max=255)),
    }],
})
コード例 #11
0
# Business Logic
#
#####################
# A trail must at least be multi-region with S3 and SNS wired up.
MINIMUM_CLOUDTRAIL_CONFIGURATION = Schema(
    {
        "S3BucketName": str,
        "SnsTopicName": str,
        "SnsTopicARN": str,
        "IsMultiRegionTrail": True,
        "TrailARN": str,
    },
    extra=ALLOW_EXTRA,
    required=True,
)

# The ideal trail additionally covers the whole organisation.
IDEAL_CLOUDTRAIL_CONFIGURATION = MINIMUM_CLOUDTRAIL_CONFIGURATION.extend(
    {"IsOrganizationTrail": True},
    extra=ALLOW_EXTRA,
    required=True,
)


def safe_check(schema, data):
    """Validate *data* against *schema*; return the validated value or None."""
    try:
        validated = schema(data)
    except Exception:
        # Best-effort: record the mismatch at debug level and signal failure.
        logger.debug(f'Data {pformat(data)} did not match schema {schema}',
                     exc_info=True)
        return None
    return validated


def keep_valid(schema, xs):
    """Return the items of *xs* that validate against *schema*, validated."""
    checked = (safe_check(schema, x) for x in xs)
    return [item for item in checked if item is not None]
コード例 #12
0
ファイル: validations.py プロジェクト: macndesign/mont_scrap
            if date is not None:
                return date
            else:
                raise ValueError
        except ValueError:
            raise Invalid('<{0}> is not a valid datetime.'.format(value))
    return fn


# Query parameters common to every list endpoint; unknown keys pass through.
base_query_param_schema = Schema(
    {
        'q': str,
        'name': str,
        'offset': IntegerLike(),
        'limit': IntegerLike(),
        'install_ts': DatetimeWithTZ(),
        'update_ts': DatetimeWithTZ(),
    },
    extra=ALLOW_EXTRA,
)


# Company listing adds id/description filters on top of the base params.
company_query_schema = base_query_param_schema.extend({
    "id": IntegerLike(),
    "name": str,
    "description": str,
    "auction_id": CSVofIntegers(),  # /?team_id=1,2,3
})
コード例 #13
0
ファイル: organisation.py プロジェクト: openpermissions/perch
        :param user: a User
        :param data: data that the user wants to update
        """
        is_admin = user.is_admin()
        is_reseller_preverifying = user.is_reseller() and data.get('pre_verified', False)
        raise Return(is_admin or is_reseller_preverifying)


# Permission applying to every resource ('all' scope); 'value' must be
# exactly None for this scope.
all_permission_schema = Schema({
    'type': 'all',
    'permission': In(PERMISSIONS),
    'value': None
}, required=True)
# Same shape, scoped to one organisation; 'value' carries the organisation
# id (Python 2 `unicode` string).
organisation_permission_schema = all_permission_schema.extend({
    'type': 'organisation_id',
    'permission': In(PERMISSIONS),
    'value': unicode
})
# Scoped to one service type; 'value' must be a known service type.
service_type_permission_schema = all_permission_schema.extend({
    'type': 'service_type',
    'permission': In(PERMISSIONS),
    'value': In(SERVICE_TYPES)
})


def group_permissions(permissions):
    """
    Groups a permissions list

    Returns a dictionary, with permission types as keys and sets of entities
    with access to the resource as values, e.g.:
コード例 #14
0
def validate(d):
    """Validate and normalise a solver configuration dictionary.

    The base schema is applied first (unknown keys are allowed through),
    then the 'material', 'time marching' and equation-set sections are
    re-validated with dedicated schemas so their defaults get filled in.
    Returns the validated dictionary; raises on schema violations.
    """

    # A 3-vector of numbers, accepted either as a list or as a tuple.
    Vector3d = All(
        Any([Number(), Number(), Number()], (Number(), Number(), Number())),
        Length(min=3, max=3))

    # Top-level keys; IC_*, BC_* and FZ_* sections arrive as extra keys and
    # are validated separately below.
    base_schema = Schema(
        {
            'units':
            'SI',
            'scale':
            Vector3d,
            Required('reference'):
            All(str, Length(min=1)),
            'partitioner':
            Any('metis', 'scotch', 'high order load balancing'),
            Required('safe', default=False):
            Boolean(),
            'initial':
            Any(str, {
                Required('name'): str,
                Required('func'): iscallable
            }),
            Required('restart', default=False):
            Boolean(),
            'restart casename':
            All(str, Length(min=1)),
            'restart ignore history':
            Boolean(),
            'preconditioner': {
                Required('factor'): Number()
            },
            Required('equations'):
            Any('euler', 'RANS', 'viscous', 'LES', 'DGviscous', 'DGRANS',
                'DGeuler'),  # , 'DGLES'),
            'report': {
                Required('frequency'): All(Coerce(int), Range(min=1)),
                'monitor': dict,
                'forces': dict,
                Required('Scale residuals by volume', default=False):
                Boolean()
            },
            'time marching':
            dict,
            'cell order':
            list,
            'Nodal Locations': {
                'Line': dict,
                'Tetrahedron': dict,
                'Tri': dict
            },
            Required('material', default='air'):
            All(str, Length(min=1)),
            'write output': {
                Required('frequency'):
                All(Coerce(int), Range(min=1)),
                Required('format'):
                Any('none', 'vtk', 'ensight', 'native'),
                Required('no volume vtk', default=False):
                Boolean(),
                'surface variables':
                list,
                'volume variables':
                list,
                'surface interpolate':
                list,
                'volume interpolate':
                list,
                'start output real time cycle':
                All(Coerce(int), Range(min=0)),
                'output real time cycle frequency':
                All(Coerce(int), Range(min=1)),
                'variable_name_alias':
                dict,
                'unsteady restart file output frequency':
                All(Coerce(int), Range(min=1))
            },
        },
        extra=ALLOW_EXTRA)

    # First pass: applies defaults and validates the top-level structure.
    d = base_schema(d)

    # Section names whose contents get a second, dedicated validation pass.
    # NOTE(review): reference_key is captured but never used below.
    material_key = d['material']
    reference_key = d['reference']
    equations_key = d['equations']
    ic_keys = [key for key in d.keys() if key.startswith('IC_')]
    bc_keys = [key for key in d.keys() if key.startswith('BC_')]
    fz_keys = [key for key in d.keys() if key.startswith('FZ_')]

    # Gas/fluid properties with standard-air defaults.
    material_schema = Schema({
        Required('gamma', default=1.4):
        Number(),
        Required('gas constant', default=287.0):
        Number(),
        Required('Sutherlands const', default=110.4):
        Number(),
        Required('Prandtl No', default=0.72):
        Number(),
        Required('Turbulent Prandtl No', default=0.9):
        Number(),
        'gravity':
        Vector3d,
        'latitude':
        Number()
    })

    # Initial-condition blocks (IC_* keys).
    ic_schema = Schema(
        {
            'pressure': Number(),
            'temperature': Number(),
            'V': {
                'vector': Vector3d,
                'Mach': Number(),
            },
            'Reference Length': Number(),
            'Reynolds No': Number(),
            'turbulence intensity': Number(),
            'eddy viscosity ratio': Number(),
            'ambient turbulence intensity': Number(),
            'ambient eddy viscosity ratio': Number(),
            'location': Vector3d,
            'profile': {
                'ABL': {
                    'roughness length': Number(),
                    'friction velocity': Number(),
                    'surface layer height': Number(),
                    'Monin-Obukhov length': Number(),
                    'TKE': Number(),
                    'z0': Number(),
                },
                'field': All(str, IsFile()),
                'local profile': Boolean()
            },
            'static pressure ratio': Number(),
            'total pressure ratio': Number(),
            'total temperature ratio': Number(),
            'reference': str,
            'viscosity': Number()
        },
        extra=ALLOW_EXTRA)

    # Time-integration settings ('time marching' section).
    timemarching_schema = Schema({
        'unsteady': {
            'total time': Number(),
            'time step': Number(),
            'order': Any('first', 'second', 1, 2),
            'start': Coerce(int)
        },
        Required('scheme'): {
            'name': Any('euler', 'runge kutta', 'lu-sgs'),
            'stage': Any(1, 'rk third order tvd', 4, 5),
            'class': Object,
            'kind': Any('local timestepping', 'global timestepping'),
            'linear gradients': Boolean()
        },
        Required('lu-sgs', default={}): {
            Required('Include Backward Sweep', default=True):
            Boolean(),
            Required('Number Of SGS Cycles', default=8):
            All(Coerce(int), Range(min=1)),
            Required('Jacobian Epsilon', default=1.0e-8):
            Number(),
            Required('Include Relaxation', default=True):
            Boolean(),
            Required('Jacobian Update Frequency', default=1):
            All(Coerce(int), Range(min=1)),
            Required('Finite Difference Jacobian', default=False):
            Boolean(),
            Required('Use Rusanov Flux For Jacobian', default=True):
            Boolean()
        },
        Required('cfl'):
        Number(),
        'cfl transport':
        Number(),
        'cfl coarse':
        Number(),
        'cfl ramp factor': {
            Required('growth'): Number(),
            Required('initial'): Number()
        },
        'cfl transport for pmg levels':
        list,
        'cfl for pmg levels':
        list,
        'ramp func':
        iscallable,
        Required('cycles'):
        All(Coerce(int), Range(min=1)),
        'multigrid':
        All(Coerce(int), Range(min=1)),
        'multigrid cycles':
        All(Coerce(int), Range(min=1)),
        'multigrid ramp':
        Number(),
        'prolong factor':
        Number(),
        'prolong transport factor':
        Number(),
        Required('multipoly', default=False):
        Boolean(),
        'multipoly cycle pattern':
        list,
        'multipoly convect only':
        Boolean(),
        'multipoly relaxation':
        Number(),
        'High Order Filter Frequency':
        Coerce(int),
        'number of time step smoothing iterations':
        Coerce(int),
        Required('cfl viscous factor', default=1.0):
        Number()
    })

    # Finite-volume Euler solver options.
    fv_euler_schema = Schema({
        Required('order'):
        Any('first', 'second', 'euler_second'),
        Required('limiter', default='vanalbada'):
        'vanalbada',
        Required('precondition', default=False):
        Boolean()
    })

    # FV viscous/LES: Euler options plus an LES turbulence switch.
    viscous_schema = fv_euler_schema.extend({
        Required('turbulence', default={}): {
            Required('les', default='none'): Any('none', 'WALE'),
        }
    })

    # FV RANS: Euler options plus a full turbulence-model section.
    rans_schema = fv_euler_schema.extend({
        Required('turbulence', default={}): {
            Required('model'):
            Any('sst', 'sas', 'sa-neg'),
            Required('les', default='none'):
            Any('none', 'DES', 'DDES', 'IDDES', 'SAS'),
            Required('betastar', default=0.09):
            Number(),
            'limit mut':
            Boolean(),
            'CDES_kw':
            Number(),
            'CDES_keps':
            Number(),
            'production':
            Coerce(int),
            'rotation correction':
            Boolean(),
            'CDES':
            Number()
        }
    })

    # Discontinuous-Galerkin Euler solver options.
    dg_euler_schema = Schema({
        Required('order'):
        Any(0, 1, 2, 3, 4),
        Required('precondition', default=False):
        Boolean(),
        Required('c11 stability parameter', default=0.0):
        Number(),
        Required('c11 stability parameter transport', default=0.0):
        Number(),
        Required('LDG upwind parameter', default=0.5):
        Number(),
        'LDG upwind parameter aux':
        Number(),
        Required('Use MUSCL Reconstruction', default=False):
        Boolean(),
        'Approximate curved boundaries':
        Boolean(),
        'Filtering Cut-on Order':
        Coerce(int),
        'Filtering Epsilon':
        Coerce(int),
        'Filtering Strength':
        Coerce(int),
        'Inviscid Flux Scheme':
        Any('HLLC', 'Rusanov')
    })

    # DG viscous: DG Euler options plus diffusive-flux / shock sensing.
    dg_viscous_schema = dg_euler_schema.extend({
        Required('BR2 Diffusive Flux Scheme', default=False):
        Boolean(),
        'Shock Sensing':
        Boolean(),
        'Shock Sensing k':
        Number(),
        'Shock Sensing Viscosity Scale':
        Number(),
        'Shock Sensing Variable':
        Any('density', 'temperature', 'mach', 'turbulence')
    })

    # DG RANS: DG Euler options plus turbulence model and shock sensing.
    dg_rans_schema = dg_euler_schema.extend({
        Required('turbulence', default={}): {
            Required('model'):
            Any('sst', 'sas', 'sa-neg'),
            Required('les', default='none'):
            Any('none', 'DES', 'DDES', 'IDDES', 'SAS'),
            Required('betastar', default=0.09):
            Number(),
            'limit mut':
            Boolean(),
            'CDES_kw':
            Number(),
            'CDES_keps':
            Number(),
            'production':
            Coerce(int),
            'rotation correction':
            Boolean(),
            'CDES':
            Number()
        },
        Required('BR2 Diffusive Flux Scheme', default=False):
        Boolean(),
        Required('Use Rusanov for turbulence equations', default=False):
        Boolean(),
        'Shock Sensing':
        Boolean(),
        'Shock Sensing k':
        Number(),
        'Shock Sensing Viscosity Scale':
        Number(),
        'Shock Sensing Variable':
        Any('density', 'temperature', 'mach', 'turbulence')
    })

    # Maps each supported equation set to the schema for its options block.
    equations_to_schema = {
        'euler': fv_euler_schema,
        'RANS': rans_schema,
        'viscous': viscous_schema,
        'LES': viscous_schema,
        'DGviscous': dg_viscous_schema,
        'DGRANS': dg_rans_schema,
        'DGeuler': dg_euler_schema,
        #        'DGLES': dg_rans_schema,
    }

    # Second pass: re-validate the named sections in place so their
    # defaults are applied.
    d[material_key] = material_schema(d.get(material_key, {}))
    d['time marching'] = timemarching_schema(d['time marching'])
    d[equations_key] = equations_to_schema[equations_key](d[equations_key])

    for k in ic_keys:
        d[k] = ic_schema(d[k])

    # NOTE(review): BC_* and FZ_* sections are currently not validated —
    # these loops are placeholders.
    for k in bc_keys:
        pass

    for k in fz_keys:
        pass

    return d
コード例 #15
0
    Required('volumes'):
    Schema(
        All(dict, Length(min=1), _dict_value_unique,
            {All(Length(min=1), _safepathcomp_str): FqdnUrl()})),
    'finished':
    bool,
    'description':
    All(str, _st_str),
    'authors':
    All([_st_str], Unique()),
})

# Full metadata: the parsed fields plus provenance/freshness information.
meta_schema = parsed_meta_schema.extend({
    Required('url'): FqdnUrl(),
    Required('volumes_checked_time'): DT.datetime,
    Required('volumes_modified_time'): DT.datetime,
})

config_schema = Schema({
    'data_dirs':
    All(
        [
            All(_safepath_str, Length(min=1)),
        ],
        Length(min=1),
    ),
    'logging_dir':
    Any(None, All(_safepath_str, Length(min=1))),
    'logging_level':
コード例 #16
0
class ChannelsEndpoint:
    """Handle channel/message related endpoints."""
    def __init__(self, server):
        self.server = server
        self.guild_man = server.guild_man

        # Fields every channel type can edit.
        self.channel_edit_base = Schema(
            {
                'name': All(str, Length(min=2, max=100)),
                'position': int,
                Optional('nsfw'): bool,
            },
            required=True)

        # Text channels additionally allow a topic.
        self.textchan_editschema = self.channel_edit_base.extend(
            {'topic': All(str, Length(min=0, max=1024))})

        # Voice channels additionally allow bitrate and user limit.
        self.voicechan_editschema = self.channel_edit_base.extend({
            'bitrate':
            All(int, Range(min=8000, max=96000)),
            'user_limit':
            All(int, Range(min=0, max=99)),
        })

        self.register()

    def register(self):
        """Register all channel/message routes on the server."""
        self.server.add_get('channels/{channel_id}', self.h_get_channel)

        self.server.add_get('channels/{channel_id}/messages',
                            self.h_get_messages)
        self.server.add_get('channels/{channel_id}/messages/{message_id}',
                            self.h_get_single_message)

        self.server.add_post('channels/{channel_id}/messages',
                             self.h_post_message)
        self.server.add_patch('channels/{channel_id}/messages/{message_id}',
                              self.h_patch_message)

        self.server.add_delete('channels/{channel_id}/messages/{message_id}',
                               self.h_delete_message)

        self.server.add_post('channels/{channel_id}/typing',
                             self.h_post_typing)

        self.server.add_put('channels/{channel_id}', self.h_edit_channel)

        self.server.add_get('channels/{channel_id}/pins', self.h_get_pins)
        self.server.add_put('channels/{channel_id}/pins/{message_id}',
                            self.h_add_pin)
        self.server.add_delete('channels/{channel_id}/pins/{message_id}',
                               self.h_remove_pin)

        self.server.add_patch('channels/{channel_id}', self.h_edit_channel)
        self.server.add_delete('channels/{channel_id}', self.h_delete_channel)

        # BUG FIX: route previously said '{chanel_id}' (typo), while
        # h_bulk_delete reads request.match_info['channel_id'].
        self.server.add_post('channels/{channel_id}/messages/bulk-delete',
                             self.h_bulk_delete)

    @auth_route
    async def h_get_channel(self, request, user):
        """`GET /channels/{channel_id}`.

        Returns a channel object
        """

        channel_id = request.match_info['channel_id']

        channel = self.guild_man.get_channel(channel_id)
        if channel is None:
            return _err(errno=10003)

        guild = channel.guild

        if user.id not in guild.members:
            return _err('401: Unauthorized')

        return _json(channel.as_json)

    @auth_route
    async def h_post_typing(self, request, user):
        """`POST /channels/{channel_id}/typing`.

        Dispatches TYPING_START events to relevant clients.
        Returns a HTTP empty response with status code 204.
        """

        channel_id = request.match_info['channel_id']

        channel = self.guild_man.get_channel(channel_id)
        if channel is None:
            return _err(errno=10003)

        if user.id not in channel.guild.members:
            return _err(errno=40001)

        self.server.loop.create_task(
            self.server.presence.typing_start(user.id, channel.id))
        return web.Response(status=204)

    async def get_attachments(self, request) -> dict:
        """Get a single attachment from a request.

        Returns a payload dict; 'raw_attachment' is either None or a
        [filename, data, size] triple, and any non-file multipart parts
        are merged in under their part names.
        """
        payload = {'raw_attachment': None}

        try:
            reader = await request.multipart()
        except AssertionError:
            log.info('failed to multipart')
            return payload

        while not reader.at_eof():
            part = await reader.next()
            if not part:
                break

            part_data = await part.read()
            log.info('part name %r', part.name)
            log.info('part filename: %r', part.filename)

            if not part.filename:
                # Non-file parts: prefer JSON, fall back to plain text.
                # (Narrowed from a bare `except:` — JSON/decode failures are
                # the expected cases; don't swallow KeyboardInterrupt etc.)
                try:
                    json_data = json.loads(part_data)
                except Exception:
                    json_data = part_data.decode()

                payload[part.name] = json_data
                log.info('key %r -> data %r', part.name, json_data)
            else:
                payload['raw_attachment'] = [
                    part.filename, part_data,
                    len(part_data)
                ]

        return payload

    @auth_route
    async def h_post_message(self, request, user):
        """`POST /channels/{channel_id}/messages/`.

        Send a message.
        Dispatches MESSAGE_CREATE events to relevant clients.
        """

        channel_id = request.match_info['channel_id']
        channel = self.guild_man.get_channel(channel_id)

        if channel is None:
            return _err(errno=10003)

        if user.id not in channel.guild.members:
            return _err(errno=40001)

        # check attachments
        payload = await self.get_attachments(request)
        attachment = payload['raw_attachment']

        if not attachment:
            try:
                payload = await request.json()
            except Exception:  # narrowed from bare `except:`
                log.exception('err')
                return _err("error parsing")

        # BUG FIX: attachment-only multipart posts may omit 'content';
        # the old `payload['content']` raised KeyError.  A missing content
        # now falls through to the empty-message check below.
        content = str(payload.get('content', ''))

        if len(content) < 1 and (not attachment):
            return _err(errno=50006)

        if len(content) > 2000:
            return web.Response(status=400)

        _data = {
            'message_id': get_snowflake(),
            'author_id': user.id,
            'channel_id': channel.id,
            'content': content,
            'nonce': payload.get('nonce'),
        }

        if attachment:
            # do image shit here
            data = attachment[1]

            image_hash, block = await self.server.images.raw_add_image(
                data, 'attachment', {
                    'filename': attachment[0],
                    'size': attachment[2],
                })

            if image_hash is None or block is None:
                # failed to process
                return web.Response(status=400,
                                    text='Failed to process image.')

            _data['attach_passon'] = [block]
            _data['attachments'] = [image_hash]

        new_message = await self.guild_man.new_message(channel, user, _data)
        return _json(new_message.as_json)

    @auth_route
    async def h_get_single_message(self, request, user):
        """`GET /channels/{channel_id}/messages/{message_id}`.

        Get a single message by its snowflake ID.
        """

        channel_id = request.match_info['channel_id']
        message_id = request.match_info['message_id']

        channel = self.guild_man.get_channel(channel_id)

        if channel is None:
            return _err(errno=10003)

        if user.id not in channel.guild.members:
            return _err(errno=40001)

        message = channel.get_message(message_id)
        if message is None:
            return _err(errno=10008)

        return _json(message.as_json)

    @auth_route
    async def h_get_messages(self, request, user):
        """`GET /channels/{channel_id}/messages`.

        Returns a list of messages.
        """

        channel_id = request.match_info['channel_id']
        channel = self.guild_man.get_channel(channel_id)

        if channel is None:
            return _err(errno=10003)

        if user.id not in channel.guild.members:
            return _err(errno=40001)

        limit = request.query.get('limit', 50)

        try:
            limit = int(limit)
        except (TypeError, ValueError):  # narrowed from bare `except:`
            return _err('limit is not a integer')

        if not ((limit >= 1) and (limit <= 100)):
            return _err(f'limit not in 1-100 range, {limit}')

        around = request.query.get('around', -1)
        before = request.query.get('before', -1)
        after = request.query.get('after', -1)

        try:
            around = int(around)
            before = int(before)
            after = int(after)
        except (TypeError, ValueError):  # narrowed from bare `except:`
            return _err('parameters are not integers')

        message_list = await channel.last_messages(limit)

        if around != -1:
            # 'around' wins over before/after: take a window of `limit`
            # messages centred on the given ID.
            avg = int(limit / 2)
            before = around + avg
            after = around - avg

            message_list = [
                m for m in message_list if (m.id < before) and (m.id > after)
            ]

        elif before != -1:
            message_list = [m for m in message_list if (m.id < before)]

        elif after != -1:
            message_list = [m for m in message_list if (m.id > after)]

        j = [m.as_json for m in message_list]
        return _json(j)

    @auth_route
    async def h_delete_message(self, request, user):
        """`DELETE /channels/{channel_id}/messages/{message_id}`.

        Delete a message sent by the user.
        """

        channel_id = request.match_info['channel_id']
        message_id = request.match_info['message_id']

        channel = self.guild_man.get_channel(channel_id)

        if channel is None:
            return _err(errno=10003)

        if user.id not in channel.guild.members:
            return _err(errno=40001)

        message = channel.get_message(message_id)
        if message is None:
            return _err(errno=10008)

        if user.id != message.author.id:
            return _err(errno=40001)

        await self.guild_man.delete_message(message)
        return web.Response(status=204)

    @auth_route
    async def h_patch_message(self, request, user):
        """`PATCH /channels/{channel_id}/messages/{message_id}`.

        Update a message sent by the current user.
        """

        channel_id = request.match_info['channel_id']
        message_id = request.match_info['message_id']

        channel = self.guild_man.get_channel(channel_id)

        if channel is None:
            return _err(errno=10003)

        if user.id not in channel.guild.members:
            return _err(errno=40001)

        message = channel.get_message(message_id)
        if message is None:
            return _err(errno=10008)

        if user.id != message.author.id:
            return _err(errno=50005)

        try:
            payload = await request.json()
        except Exception:  # narrowed from bare `except:`
            return _err("error parsing")

        # BUG FIX: the old code did str(payload.get('content', None)) and
        # then tested `is None` — str() never returns None, so a missing
        # 'content' silently became the literal string 'None'.
        content = payload.get('content')
        if content is None:
            return _err('Erroneous payload')

        _data = {
            'content': str(content),
        }

        await self.guild_man.edit_message(message, _data)
        return _json(message.as_json)

    @auth_route
    async def h_bulk_delete(self, request, user):
        """`POST /channels/{channel_id}/messages/bulk-delete`.

        Deletes multiple messages.
        Returns 204 empty response on success, fires mutiple MESSAGE_DELETEs.
        """
        channel_id = request.match_info['channel_id']
        channel = self.guild_man.get_channel(channel_id)
        if channel is None:
            return _err(errno=10003)

        payload = await request.json()
        messages = payload['messages']
        if len(messages) < 1:
            # uhh, you sent an empty array... I think this is a success.
            return web.Response(status=204)

        messages = [int(message_id) for message_id in messages]
        current = time.time()
        for message_id in messages:
            timestamp = snowflake_time(message_id)
            delta = current - timestamp
            if delta > BULK_DELETE_LIMIT:
                # do the error
                return _err('Message too old.')

        messages = set(messages)

        # since it can take some time, we create a task
        self.server.loop.create_task(
            channel.delete_many(messages, fire_multiple=True))

        return web.Response(status=204)

    @auth_route
    async def h_edit_channel(self, request, user):
        """`PUT/PATCH /channels/{channel_id}`.

        Edit a channel. Receives a JSON payload.
        """
        channel_id = request.match_info['channel_id']
        chan = self.guild_man.get_channel(channel_id)
        if chan is None:
            return _err(errno=10003)

        if chan.guild.owner_id != user.id:
            return _err(errno=40001)

        payload = await request.json()

        if isinstance(chan, BaseTextChannel):
            payload = self.textchan_editschema(payload)
        elif isinstance(chan, BaseVoiceChannel):
            payload = self.voicechan_editschema(payload)

        new_chan = await chan.edit(payload)
        return _json(new_chan.as_json)

    @auth_route
    async def h_delete_channel(self, request, user):
        """`DELETE /channels/{channel_id}`.

        Delete a channel.
        Fires CHANNEL_DELETE events to respective clients.
        """
        channel_id = request.match_info['channel_id']
        chan = self.guild_man.get_channel(channel_id)
        if chan is None:
            return _err(errno=10003)

        if chan.guild.owner_id != user.id:
            return _err(errno=40001)

        await chan.delete()
        return _json(chan.as_json)

    @auth_route
    async def h_get_pins(self, request, user):
        """`GET /channels/{channel_id}/pins`

        Returns all pinned messages in the channel as an array of message objects.
        """
        channel_id = request.match_info['channel_id']
        channel = self.guild_man.get_channel(channel_id)
        if channel is None:
            return _err(errno=10003)

        return _json(await channel.get_pins())

    @auth_route
    async def h_add_pin(self, request, user):
        """`PUT /channels/{channel_id}/pins/{message_id}`

        Pins a message.
        Returns 204 empty response on success, fires a CHANNEL_PINS_UPDATE event.
        TODO: Add perms to this
        """
        channel_id = request.match_info['channel_id']
        message_id = request.match_info['message_id']

        channel = self.guild_man.get_channel(channel_id)

        if channel is None:
            return _err(errno=10003)

        if user.id not in channel.guild.members:
            return _err(errno=40001)

        message = channel.get_message(message_id)
        if message is None:
            return _err(errno=10008)

        if message.channel_id != channel.id:
            return _err(errno=50019)

        if len(await channel.get_pins()) == 50:
            return _err('exceeded channel pin limit')

        await channel.add_pin(message_id)
        return web.Response(status=204)

    @auth_route
    async def h_remove_pin(self, request, user):
        """`DELETE /channels/{channel_id}/pins/{message_id}`

        Removes a pinned message.
        Returns 204 empty response on success, fires a CHANNEL_PINS_UPDATE event.
        TODO: Add perms to this
        """
        channel_id = request.match_info['channel_id']
        message_id = request.match_info['message_id']

        channel = self.guild_man.get_channel(channel_id)

        if channel is None:
            return _err(errno=10003)

        if user.id not in channel.guild.members:
            return _err(errno=40001)

        message = channel.get_message(message_id)
        if message is None:
            return _err(errno=10008)

        if message.channel_id != channel.id:
            return _err(errno=50019)

        await channel.remove_pin(message_id)
        return web.Response(status=204)
コード例 #17
0
def create_platform_basics(
    logger: "logging.Logger",
    entity_domain: str,
    entity_factory: Type["HekrEntity"],
    base_schema: vol.Schema,
):
    """Build the platform schema and setup coroutines for *entity_domain*.

    Returns a ``(platform_schema, async_setup_platform, async_setup_entry)``
    tuple for re-export from a Home Assistant platform module.

    :raises ValueError: if *entity_domain* has no entry in ``CONF_DOMAINS``.
    """
    if entity_factory is None:
        entity_factory = HekrEntity

    # Resolve the config key / protocol key pair registered for this domain.
    # NOTE: unpack into a distinct name (``proto_key``) — the original
    # unpacked straight into ``protocol_key`` and then re-assigned it to
    # itself, which shadowed the result variable with the loop variable.
    config_key = None
    protocol_key = None
    for conf_key, (ent_domain, proto_key) in CONF_DOMAINS.items():
        if ent_domain == entity_domain:
            config_key = conf_key
            protocol_key = proto_key
            break

    if config_key is None:
        raise ValueError(
            'Entity domain "%s" is not supported for [%s] domain.' %
            (entity_domain, DOMAIN))

    async def _async_setup_entry(
        hass: HomeAssistantType,
        config_entry: config_entries.ConfigEntry,
        async_add_devices,
    ):
        """Set up entities from a config entry (device- or account-based)."""
        conf = config_entry.data
        config_type = CONF_DEVICE if CONF_DEVICE in conf else CONF_ACCOUNT
        hekr_data: "HekrData" = hass.data[DOMAIN]
        item_config = conf[config_type]

        if config_type == CONF_DEVICE:
            # Single local device entry.
            device_id = item_config[CONF_DEVICE_ID]
            device_cfg = hekr_data.devices_config_entries[device_id]
            _LOGGER.debug("Adding local device %s with config: %s" %
                          (device_id, device_cfg))
            return await _setup_entity(
                logger=logger,
                hass=hass,
                async_add_entities=async_add_devices,
                config=device_cfg,
                config_key=config_key,
                protocol_key=protocol_key,
                entity_domain=entity_domain,
                entity_factory=entity_factory,
            )

        elif config_type == CONF_ACCOUNT:
            # Cloud account entry: set up every device bound to the account.
            account_id = item_config[CONF_USERNAME]

            tasks = []
            for device_id, device in hekr_data.get_account_devices(
                    account_id).items():
                device_cfg = hekr_data.devices_config_entries[device_id]
                _LOGGER.debug(
                    "Adding device %s for account %s with config: %s" %
                    (device_id, account_id, device_cfg))
                tasks.append(
                    _setup_entity(
                        logger=logger,
                        hass=hass,
                        async_add_entities=async_add_devices,
                        config=device_cfg,
                        config_key=config_key,
                        protocol_key=protocol_key,
                        entity_domain=entity_domain,
                        entity_factory=entity_factory,
                    ))

            # Fix: the original ``all(await asyncio.wait(tasks))`` tested the
            # truthiness of the (done, pending) set pair — the empty
            # ``pending`` set made it always False. Gather the actual setup
            # results and succeed only if every device set up.
            return all(await asyncio.gather(*tasks))

        return False

    async def _async_setup_platform(hass: HomeAssistantType,
                                    config: ConfigType, *_, **__):
        """Deprecated YAML-platform setup: refuse and emit a migration note."""
        # @TODO: this is a deprecated block of code

        _LOGGER.error(
            "Platform setup is deprecated! Please, convert your configuration to use component instead of "
            "platform. A persistent notification will be created with config for your particular device."
        )

        del config[CONF_PLATFORM]

        from homeassistant.components.persistent_notification import (
            DOMAIN,
            SERVICE_CREATE,
            ATTR_TITLE,
            ATTR_MESSAGE,
        )

        def timedelta_str(td: timedelta, offset: str) -> str:
            """Render *td* as indented ``unit: value`` lines (zero units omitted)."""
            hours = td.seconds // 3600
            minutes = (td.seconds % 3600) // 60
            seconds = td.seconds % 60

            return offset + ("\n" + offset).join([
                "%s: %s" % replace for replace in {
                    "seconds": seconds,
                    "minutes": minutes,
                    "hours": hours,
                    "days": td.days,
                }.items() if replace[1] != 0
            ])

        # Build a YAML snippet equivalent to the legacy platform config,
        # omitting keys that still carry their schema default.
        conversion_content = "```\nhekr:\n  devices:\n    - "
        default_values = {
            k: k.default()
            for k in BASE_PLATFORM_SCHEMA.keys()
            if not isinstance(k.default, vol.schema_builder.Undefined)
        }
        conversion_content += "\n      ".join([
            "%s: %s" % (
                k,
                "\n" +
                timedelta_str(v, " " * 8) if isinstance(v, timedelta) else v,
            ) for k, v in config.items() if default_values.get(k) != v
        ])
        conversion_content += "\n```"

        hass.async_create_task(
            hass.services.async_call(
                DOMAIN,
                SERVICE_CREATE,
                {
                    ATTR_TITLE:
                    "Hekr device %s" % config[CONF_DEVICE_ID],
                    ATTR_MESSAGE:
                    "Setting up Hekr devices using platforms is no longer supported. Consider switching to "
                    "an integration setup via interface or YAML. To port your platform configuration to the "
                    "new format, an entry has been provided for you to copy:\n"
                    + conversion_content,
                },
            ))
        return False

    # Final platform schema: the caller's base schema extended with the
    # shared platform keys, then cross-checked for list correspondence.
    _PLATFORM_SCHEMA = vol.All(
        base_schema.extend(BASE_PLATFORM_SCHEMA),
        test_for_list_correspondence(config_key, protocol_key),
    )

    return _PLATFORM_SCHEMA, _async_setup_platform, _async_setup_entry
コード例 #18
0
ファイル: test_partials.py プロジェクト: ccooper/releasetasks
    def test_generator_signing_balrog_tasks(self):
        """Verify the funsize generator -> signing -> balrog task chain.

        For each platform/version pair, checks each task's ``requires``
        dependencies, metadata names, signing scopes and manifest URL, and
        the platform-specific partial-update (MAR) definitions.
        """
        for p in ("win32", "macosx64"):
            for v, appV in (("38.0build1", "38.0"), ("37.0build2", "37.0")):
                # Look up the three chained tasks for this platform/version.
                generator = get_task_by_name(
                    self.graph,
                    "{}_en-US_{}_funsize_update_generator".format(p, v))
                signing = get_task_by_name(
                    self.graph,
                    "{}_en-US_{}_funsize_signing_task".format(p, v))
                balrog = get_task_by_name(
                    self.graph, "{}_en-US_{}_funsize_balrog_task".format(p, v))

                # Generator depends only on the generator docker image.
                generator_schema = Schema(
                    {
                        'requires': [self.generator_image['taskId']],
                        'task': {
                            'metadata': {
                                'name':
                                "[funsize] Update generating task %s %s for %s"
                                % (
                                    p,
                                    "en-US",
                                    v.split('build')[0],
                                )
                            }
                        }
                    },
                    extra=True,
                    required=True)

                # Signing depends on the generator task and must carry the
                # release-signing scopes and the generator's manifest URL.
                signing_schema = Schema(
                    {
                        'requires': [generator['taskId']],
                        'task': {
                            'metadata': {
                                'name':
                                "[funsize] MAR signing task %s %s for %s" % (
                                    p,
                                    "en-US",
                                    v.split('build')[0],
                                ),
                            },
                            'payload': {
                                'signingManifest':
                                "https://queue.taskcluster.net/v1/task/%s/artifacts/public/env/manifest.json"
                                % generator["taskId"],
                            },
                            'scopes': [
                                "project:releng:signing:cert:release-signing",
                                "project:releng:signing:format:mar",
                                "project:releng:signing:format:gpg",
                            ],
                        },
                    },
                    extra=True,
                    required=True)

                # Balrog publish depends on signing plus the balrog image,
                # and needs the balrog VPN proxy feature scope.
                balrog_schema = Schema(
                    {
                        'requires': [
                            signing['taskId'],
                            self.funsize_balrog_image['taskId']
                        ],
                        'task': {
                            'scopes': ["docker-worker:feature:balrogVPNProxy"],
                            'metadata': {
                                'name':
                                "[funsize] Publish to Balrog %s %s for %s" % (
                                    p,
                                    "en-US",
                                    v.split('build')[0],
                                ),
                            }
                        }
                    },
                    extra=True,
                    required=True)

                # Partial-update sources differ per platform (os= query arg
                # and the complete.mar artifact name).
                if p == "win32":
                    generator_schema = generator_schema.extend({
                        'task': {
                            'extra': {
                                'funsize': {
                                    'partials': [{
                                        'from_mar':
                                        "http://download.mozilla.org/?product=firefox-%s-complete&os=win&lang=en-US"
                                        % appV,
                                        'to_mar':
                                        "https://queue.taskcluster.net/v1/task/xyy/artifacts/public/build/firefox-42.0.en-US.win32.complete.mar",
                                    }]
                                }
                            }
                        }
                    })

                elif p == "macosx64":
                    generator_schema = generator_schema.extend({
                        'task': {
                            'extra': {
                                'funsize': {
                                    'partials': [{
                                        'from_mar':
                                        "http://download.mozilla.org/?product=firefox-%s-complete&os=osx&lang=en-US"
                                        % appV,
                                        'to_mar':
                                        "https://queue.taskcluster.net/v1/task/xyz/artifacts/public/build/firefox-42.0.en-US.mac.complete.mar",
                                    }]
                                }
                            }
                        }
                    })

                verify(generator, generator_schema,
                       TestEnUSPartials.generator_not_allowed)
                verify(balrog, balrog_schema)
                verify(signing, signing_schema)
コード例 #19
0
    str,
})

# Per-target configuration: the global schema plus target-specific required
# keys. The tunables repeated from the global schema are re-declared here
# *without* defaults, so a target entry only overrides what it sets.
target_config_schema = global_config_schema.extend({
    Required("project"): str,
    Required("resolution_ms", default=60000): All(int, Range(min=60000)),
    Required("window_seconds", default=60): All(int, Range(min=60)),
    Required("searches"): [str],
    Required("percentiles"): [Coerce(float)],
    Required("include_ops_counts", default=True): bool,
    Required("include_error_counts", default=True): bool,
    # repeat keys from global to remove default values
    "run_interval_seconds": All(int, Range(min=1)),
    "batch_size": All(int, Range(min=1)),
    "retries": All(int, Range(min=1, max=20)),
    "backoff_factor": All(float, Range(min=0)),
})

config_schema = Schema({
    Required("sumo_http_url"):
コード例 #20
0
ファイル: schemas.py プロジェクト: chronoB/pygti
    "city": str,
    "combinedName": str,
    "id": str,
    "type": SDType,
    "coordinate": Coordinate,
    "tariffDetails": TariffDetails,
    "serviceTypes": [str],
    "hasStationInformation": bool,
})

# "CheckName" request: the shared base request schema extended with the
# name-lookup specific fields (called bound on the instance rather than as
# an unbound ``Schema.extend(...)``).
CNRequest = BaseRequestType.extend({
    "theName": SDName,
    "maxList": int,
    "maxDistance": int,
    "coordinateType": CoordinateType,
    "tariffDetails": bool,
    "allowTypeSwitch": bool,
})

# Date/time pair as exchanged with the GTI API (both plain strings).
GTITime = Schema({
    "date": str,
    "time": str,
})

# One service filter entry: the service id is mandatory, all other
# descriptive fields are optional.
FilterEntry = Schema({
    Required("serviceID"): str,
    "stationIDs": [str],
    "serviceName": str,
    "label": str,
})
コード例 #21
0
    def test_generator_signing_balrog_tasks(self):
        """Verify the funsize generator -> signing -> balrog task chain.

        For each platform/version pair, checks each task's ``requires``
        dependencies, metadata names, signing scopes and manifest URL, and
        the platform-specific partial-update (MAR) definitions.
        """
        for p in ("win32", "macosx64"):
            for v, appV in (("38.0build1", "38.0"), ("37.0build2", "37.0")):
                # Look up the three chained tasks for this platform/version.
                generator = get_task_by_name(self.graph, "{}_en-US_{}_funsize_update_generator".format(p, v))
                signing = get_task_by_name(self.graph, "{}_en-US_{}_funsize_signing_task".format(p, v))
                balrog = get_task_by_name(self.graph, "{}_en-US_{}_funsize_balrog_task".format(p, v))

                # Generator depends only on the generator docker image.
                generator_schema = Schema({
                    'requires': [self.generator_image['taskId']],
                    'task': {
                        'metadata': {
                            'name': "[funsize] Update generating task %s %s for %s" % (p, "en-US", v.split('build')[0],)
                        }
                    }
                }, extra=True, required=True)

                # Signing depends on the generator task and must carry the
                # release-signing scopes and the generator's manifest URL.
                signing_schema = Schema({
                    'requires': [generator['taskId']],
                    'task': {
                        'metadata': {
                            'name': "[funsize] MAR signing task %s %s for %s" % (p, "en-US", v.split('build')[0],),
                        },
                        'payload': {
                            'signingManifest': "https://queue.taskcluster.net/v1/task/%s/artifacts/public/env/manifest.json" % generator["taskId"],
                        },
                        'scopes': [
                            "project:releng:signing:cert:release-signing",
                            "project:releng:signing:format:mar",
                            "project:releng:signing:format:gpg",
                        ],
                    },
                }, extra=True, required=True)

                # Balrog publish depends on signing plus the balrog image,
                # and needs the balrog VPN proxy feature scope.
                balrog_schema = Schema({
                    'requires': [signing['taskId'], self.funsize_balrog_image['taskId']],
                    'task': {
                        'scopes': ["docker-worker:feature:balrogVPNProxy"],
                        'metadata': {
                            'name': "[funsize] Publish to Balrog %s %s for %s" % (p, "en-US", v.split('build')[0],),
                        }
                    }
                }, extra=True, required=True)

                # Partial-update sources differ per platform (os= query arg
                # and the complete.mar artifact name).
                if p == "win32":
                    generator_schema = generator_schema.extend({
                        'task': {
                            'extra': {
                                'funsize': {
                                    'partials': [
                                        {
                                            'from_mar': "http://download.mozilla.org/?product=firefox-%s-complete&os=win&lang=en-US" % appV,
                                            'to_mar': "https://queue.taskcluster.net/v1/task/xyy/artifacts/public/build/firefox-42.0.en-US.win32.complete.mar",
                                        }
                                    ]
                                }
                            }
                        }
                    })

                elif p == "macosx64":
                    generator_schema = generator_schema.extend({
                        'task': {
                            'extra': {
                                'funsize': {
                                    'partials': [
                                        {
                                            'from_mar': "http://download.mozilla.org/?product=firefox-%s-complete&os=osx&lang=en-US" % appV,
                                            'to_mar': "https://queue.taskcluster.net/v1/task/xyz/artifacts/public/build/firefox-42.0.en-US.mac.complete.mar",
                                        }
                                    ]
                                }
                            }
                        }
                    })

                verify(generator, generator_schema, TestEnUSPartials.generator_not_allowed)
                verify(balrog, balrog_schema)
                verify(signing, signing_schema)
コード例 #22
0
    def validate(self, tipo_doc, data):
        """Validate an electronic-document payload against its schema.

        :param tipo_doc: two-character document type code
            ('03' = boleta, '07'/'08' = credit/debit note).
        :param data: dict with the full document payload.
        :return: the validated (and coerced) data.
        :raises voluptuous.Invalid: if ``data`` does not match the schema.
        """
        doc = Schema({
            Required('serie'):
            All(str, Length(min=4, max=4)),
            Required('correlativo'):
            All(str, Length(min=1, max=8)),
            Required('nombreEmisor'):
            All(str, Length(min=1, max=100)),
            Required('tipoDocEmisor'):
            All(str,
                Length(min=1, max=2),
                msg='El tipo de Doc. Emisor debe '
                'tener un tamaño entre 1 y 2'),
            Required('numDocEmisor'):
            All(str, Length(min=1, max=25)),
            'direccionOrigen':
            All(str, Length(min=1, max=100)),
            'direccionUbigeo':
            All(str, Length(min=6, max=6)),
            Required('tipoDocReceptor'):
            All(str, Length(min=1, max=2)),
            Required('numDocReceptor'):
            All(str, Length(min=1, max=25)),
            Required('nombreReceptor'):
            All(str, Length(min=1, max=100)),
            # TODO: check whether field order causes problems
            Required('tipoMoneda'):
            All(str, Length(min=3, max=3)),
            'mntNeto':
            Coerce(float),
            'mntTotalIgv':
            Coerce(float),
            'mntTotal':
            Coerce(float),
            'fechaVencimiento':
            All(str, Length(min=10, max=10)),
            'tipoFormatoRepresentacionImpresa':
            All(str, Length(min=1, max=100)),
        })

        # Fix: the original used ``tipo_doc in '03'``, a *substring* test
        # that also matched '0', '3' and ''. Use equality for the single code.
        if tipo_doc == '03':
            # Boletas (sales receipts)
            doc = doc.extend({
                'direccionDestino':
                All(str, Length(min=1, max=100)),
            })
        if tipo_doc in ('07', '08'):
            # Credit / debit notes require a reason and a modification type.
            doc = doc.extend({
                Required('sustento'):
                All(str, Length(min=1, max=100)),
                Required('tipoMotivoNotaModificatoria'):
                All(str, Length(min=2, max=2))
            })

        impuesto = Schema(
            All([{
                'codImpuesto': All(str, Length(min=1, max=4)),
                'montoImpuesto': Coerce(float),
                'tasaImpuesto': Coerce(float),
            }]))
        # Line items: at least one entry is required.
        detalle = Schema(
            All(
                [{
                    Required('cantidadItem'):
                    Coerce(float),
                    Required('unidadMedidaItem'):
                    All(str, Length(min=1, max=3)),
                    'codItem':
                    All(str, Length(min=1, max=30)),
                    Required('nombreItem'):
                    All(str, Length(min=1, max=250)),
                    # TODO: should not be mandatory for notes
                    Required('precioItem'):
                    Coerce(float),
                    Required('precioItemSinIgv'):
                    Coerce(float),
                    Required('montoItem'):
                    Coerce(float),
                    # end TODO
                    'descuentoMonto':
                    Coerce(float),
                    Required('codAfectacionIgv'):
                    All(str, Length(min=2, max=2)),
                    'tasaIgv':
                    Coerce(float),
                    'montoIgv':
                    Coerce(float),
                    Required('idOperacion'):
                    All(str, Length(min=1, max=80))
                }],
                Length(min=1)))
        descuento = Schema(All({
            'mntTotalDescuentos': Coerce(float),
        }))

        # Top-level envelope combining the header and the sub-schemas above.
        schema = Schema({
            Required('documento'):
            doc,
            Required('tipoDocumento'):
            All(str, Length(min=2, max=2)),
            Required('fechaEmision'):
            All(str, Length(min=10, max=10)),
            Required('idTransaccion'):
            All(str, Length(min=1)),
            'correoReceptor':
            str,
            Required('impuesto'):
            impuesto,
            Required('detalle'):
            detalle,
            'descuento':
            descuento,
        })
        if tipo_doc in ('07', '08'):
            # Notes must reference the original document they modify.
            referencia = Schema(
                All([{
                    'tipoDocumentoRef': All(str, Length(min=1, max=2)),
                    'serieRef': All(str, Length(min=4, max=4)),
                    'correlativoRef': All(str, Length(min=1, max=8)),
                    'fechaEmisionRef': All(str, Length(min=10, max=10)),
                }]))

            schema = schema.extend({
                'referencia': referencia,
            })
        return schema(data)
コード例 #23
0
ファイル: organisation.py プロジェクト: openpermissions/perch
        is_reseller_preverifying = user.is_reseller() and data.get(
            'pre_verified', False)
        raise Return(is_admin or is_reseller_preverifying)


# Permission entry that applies across all resource types.
all_permission_schema = Schema({
    'type': 'all',
    'permission': In(PERMISSIONS),
    'value': None,
}, required=True)
# Permission scoped to a single organisation; value carries the org id.
organisation_permission_schema = all_permission_schema.extend({
    'type': 'organisation_id',
    'permission': In(PERMISSIONS),
    'value': unicode,
})
# Permission scoped to a service type; value must be a known service type.
service_type_permission_schema = all_permission_schema.extend({
    'type': 'service_type',
    'permission': In(PERMISSIONS),
    'value': In(SERVICE_TYPES),
})


def group_permissions(permissions):
    """
コード例 #24
0
ファイル: schemas.py プロジェクト: muffinista/ivory
            constants.PUNISH_SUSPEND)
    },
    extra=ALLOW_EXTRA)

# Punishment applicable to pending accounts: rejection only.
PendingAcctPunishment = Schema({
    Required("type"): Any(constants.PUNISH_REJECT),
}, extra=ALLOW_EXTRA)

# Common shape of a moderation rule; concrete rule kinds extend this.
Rule = Schema({
    Required("name"): str,
    Required("type"): str,
    Required("severity"): int,
}, extra=ALLOW_EXTRA)

# Concrete rule flavours: each pairs the base rule with its punishment type.
ReportRule = Rule.extend({Required("punishment"): ReportPunishment})
PendingAcctRule = Rule.extend({Required("punishment"): PendingAcctPunishment})

# Top-level config sections are simply lists of the matching rule kind.
Reports = Schema({Required("rules"): [ReportRule]})
PendingAccounts = Schema({Required("rules"): [PendingAcctRule]})

IvoryConfig = Schema({
    Required("token"):
    str,
    # I know I should be using Url() here but it didn't work and I'm tired
    Required("instanceURL"):
    str,
    "waitTime":
    int,
コード例 #25
0
def create_platform_basics(logger: 'logging.Logger', entity_domain: str,
                           entity_factory: Type['HekrEntity'],
                           base_schema: vol.Schema):
    """Build the platform schema and async setup entry points for one domain.

    Returns ``(platform_schema, async_setup_platform, async_setup_entry)``
    for re-export from a platform module.

    :raises ValueError: if *entity_domain* has no entry in ``CONF_DOMAINS``.
    """
    if entity_factory is None:
        entity_factory = HekrEntity

    # Resolve the config key / protocol key pair registered for this domain.
    config_key = None
    protocol_key = None
    for conf_key, (ent_domain, proto_key) in CONF_DOMAINS.items():
        if ent_domain == entity_domain:
            config_key = conf_key
            protocol_key = proto_key
            break

    if config_key is None:
        raise ValueError(
            'Entity domain "%s" is not supported for [%s] domain.' %
            (entity_domain, DOMAIN))

    async def _async_setup_entry(hass: HomeAssistantType,
                                 config_entry: config_entries.ConfigEntry,
                                 async_add_devices):
        """Set up entities from a config entry."""
        conf = config_entry.data
        # An entry carries either a device section or an account section.
        config_type = CONF_DEVICE if CONF_DEVICE in conf else CONF_ACCOUNT
        item_config = conf[config_type]

        if config_type == CONF_DEVICE:
            return await _setup_entity(logger=logger,
                                       hass=hass,
                                       async_add_entities=async_add_devices,
                                       config=item_config,
                                       config_key=config_key,
                                       protocol_key=protocol_key,
                                       entity_domain=entity_domain,
                                       entity_factory=entity_factory)

        # NOTE(review): account-type entries are not handled in this version
        # and silently return False — confirm whether that is intentional.
        return False

    async def _async_setup_platform(hass: HomeAssistantType,
                                    config: ConfigType, async_add_entities,
                                    *_):
        """Set up entities from YAML platform configuration."""
        if config.get(CONF_NAME) is None:
            # Derive a default entity name from protocol/host/device id.
            protocol = SUPPORTED_PROTOCOLS[config[CONF_PROTOCOL]]
            config[CONF_NAME] = DEFAULT_NAME_DEVICE.format(
                protocol_name=protocol.get(PROTOCOL_NAME),
                host=config.get(CONF_HOST),
                device_id=config.get(CONF_DEVICE_ID),
            )

        return await _setup_entity(logger=logger,
                                   hass=hass,
                                   async_add_entities=async_add_entities,
                                   config=config,
                                   config_key=config_key,
                                   protocol_key=protocol_key,
                                   entity_domain=entity_domain,
                                   entity_factory=entity_factory)

    # Platform schema: base schema + shared keys, auth-method exclusivity,
    # and list correspondence between config key and protocol definition.
    _PLATFORM_SCHEMA = vol.All(
        base_schema.extend(BASE_PLATFORM_SCHEMA), exclusive_auth_methods,
        test_for_list_correspondence(config_key, protocol_key))

    return _PLATFORM_SCHEMA, _async_setup_platform, _async_setup_entry
コード例 #26
0
    def setUp(self):
        """Build a release task graph with partner, EME-free and SHA-1
        repacks enabled, and capture the tasks the test methods inspect."""
        # Task attributes common to each partner repack
        common_task_schema = Schema({
            'task': {
                'provisionerId': 'buildbot-bridge',
                'workerType': 'buildbot-bridge',
                'payload': {
                    'properties': {
                        'version': '42.0b2',
                        'build_number': 3,
                    }
                }
            }
        })

        # Each repack flavour shares the common attributes but pulls its
        # manifests from a different repository.
        self.partner_task_schema = common_task_schema.extend({
            'task': {
                'payload': {
                    'properties': {
                        'repack_manifests_url': '[email protected]:mozilla-partners/repack-manifests.git',
                    }
                }
            }
        }, required=True, extra=True)

        self.eme_free_task_schema = common_task_schema.extend({
            'task': {
                'payload': {
                    'properties': {
                        'repack_manifests_url': 'https://github.com/mozilla-partners/mozilla-EME-free-manifest',
                    }
                }
            }
        }, required=True, extra=True)

        self.sha1_task_schema = common_task_schema.extend({
            'task': {
                'payload': {
                    'properties': {
                        'repack_manifests_url': 'https://github.com/mozilla-partners/mozilla-sha1-manifest',
                    }
                }
            }
        }, required=True, extra=True)

        # Graph inputs: which repack flavours run on which platforms, plus
        # the en-US and l10n build configuration the graph builder needs.
        test_kwargs = create_firefox_test_args({
            'push_to_candidates_enabled': True,
            'push_to_releases_enabled': True,
            'push_to_releases_automatic': True,
            'source_enabled': True,
            'signing_pvt_key': PVT_KEY_FILE,
            'partner_repacks_platforms': ['win32', 'linux'],
            'eme_free_repacks_platforms': ['win32', 'macosx64'],
            'sha1_repacks_platforms': ['win32'],
            'release_channels': ['foo', 'bar'],
            'en_US_config': {
                "platforms": {
                    "linux": {'signed_task_id': 'abc', 'unsigned_task_id': 'abc'},
                    "macosx64": {'signed_task_id': 'abc', 'unsigned_task_id': 'abc'},
                    "win32": {'signed_task_id': 'abc', 'unsigned_task_id': 'abc'},
                }
            },
            'l10n_config': {
                "platforms": {
                    "win32": {
                        "en_us_binary_url": "https://queue.taskcluster.net/something/firefox.exe",
                        "mar_tools_url": "https://queue.taskcluster.net/something/",
                        "locales": ["de", "en-GB", "zh-TW"],
                        "chunks": 1,
                    },
                    "linux": {
                        "en_us_binary_url": "https://queue.taskcluster.net/something/firefox.tar.xz",
                        "mar_tools_url": "https://queue.taskcluster.net/something/",
                        "locales": ["de", "en-GB", "zh-TW"],
                        "chunks": 1,
                    },
                    "macosx64": {
                        "en_us_binary_url": "https://queue.taskcluster.net/something/firefox.dmg",
                        "mar_tools_url": "https://queue.taskcluster.net/something/",
                        "locales": ["de", "en-GB", "zh-TW"],
                        "chunks": 1,
                    },
                },
                "changesets": {
                    "de": "default",
                    "en-GB": "default",
                    "zh-TW": "default",
                },
            },
        })

        self.graph = make_task_graph(**test_kwargs)
        # Collect the generated repack tasks, per flavour/platform.
        self.partner_tasks = [
            get_task_by_name(self.graph, "release-foo-firefox-{}_partner_repacks".format(platform))
            for platform in ["win32", "linux"]
        ]
        self.eme_free_tasks = [
            get_task_by_name(self.graph, "release-foo-firefox-{}_eme_free_repacks".format(platform))
            for platform in ["win32", "macosx64"]
        ]
        self.sha1_tasks = [
            get_task_by_name(self.graph, "release-foo-firefox-{}_sha1_repacks".format(platform))
            for platform in ["win32"]
        ]

        self.partner_push_to_mirrors_task = get_task_by_name(self.graph, "release-foo-firefox_partner_repacks_copy_to_releases")
        self.push_to_mirrors_task = get_task_by_name(self.graph, "release-foo_firefox_push_to_releases")

        # Beetmover tasks the push-to-mirrors step is expected to depend on.
        self.upstream_dependencies = [
            "release-foo_firefox_{}_complete_en-US_beetmover_candidates".format(platform)
            for platform in ["win32", "linux", "macosx64"]
        ] + [
            "release-foo_firefox_{}_l10n_repack_beetmover_candidates_1".format(platform)
            for platform in ["win32", "linux", "macosx64"]
        ]
コード例 #27
0
    "metadata":
    str,
})

# Per-target configuration: the global schema plus target-specific required
# keys. The tunables repeated from the global schema are re-declared here
# *without* defaults, so a target entry only overrides what it sets.
target_config_schema = global_config_schema.extend({
    Required("url"): Url(),
    Required("name"): str,
    Required("exclude_metrics", default=[]): [str],
    Required("include_metrics", default=[]): [str],
    # repeat keys from global to remove default values
    "run_interval_seconds": All(int, Range(min=1)),
    "target_threads": All(int, Range(min=1, max=50)),
    "batch_size": All(int, Range(min=1)),
    "retries": All(int, Range(min=1, max=20)),
    "backoff_factor": All(float, Range(min=0)),
    "token_file_path": IsFile(),
})

config_schema = Schema({
    Required("sumo_http_url"):
    Url(),
コード例 #28
0
        raise Invalid('queues must be a list')

    for val in value:
        if not isinstance(val, str):
            raise Invalid('all queues must be strings')

    return value


# Settings shared by every component.
common_schema = Schema({
    'database': validate_redis,
})

# The scheduler needs nothing beyond the common settings; workers also
# declare the queues they consume.
scheduler_schema = common_schema
worker_schema = common_schema.extend({'queues': validate_queues})


def load_config(path, component):
    expanded_path = os.path.expanduser(path)
    if os.path.exists(expanded_path) is False:
        LOG.error('config file %s does not exist' % (expanded_path))
        sys.exit(1)

    try:
        with open(expanded_path) as f:
            data = yaml.safe_load(f)
    except IOError as exc:
        LOG.error('failed to open config file %s: %s' %
                  (expanded_path, six.text_type(exc)))
        sys.exit(1)
コード例 #29
0
ファイル: schema.py プロジェクト: dalerxli/LLSpy
        for k, v in __validator__.items()
    },
    extra=PREVENT_EXTRA)

# Local-processing parameter schema: the shared ``__schema__`` extended with
# acquisition-specific keys, each with a human-readable error message.
__localSchema__ = __schema__.extend({
    'otfs': [Any(None, filepath)],
    'drdata':
    All(Coerce(float),
        Range(0, 0.5),
        # NOTE(review): message says 0.04 but Range allows down to 0 —
        # confirm which bound is intended.
        msg='Data pixel size (drdata) must be float between 0.04 - 0.5'),
    'dzdata':
    All(Coerce(float),
        Range(0, 50),
        msg='Data Z step size (dzdata) must be float between 0 - 50'),
    'dzFinal':
    All(Coerce(float),
        Range(0, 50),
        # Fix: the message previously named 'dzdata' (copy-paste error).
        msg='Data Z step size (dzFinal) must be float between 0 - 50'),
    'wavelength': [
        All(Coerce(int),
            Range(300, 1000),
            msg='wavelength must be int between 300 - 1000')
    ],
    'deskew':
    All(Coerce(float),
        Range(-180, 180),
        msg='deskew angle must be float between -180 and 180')
})
# Local parameter dicts must not contain unknown keys.
__localSchema__.extra = PREVENT_EXTRA


def localParams(*args, **kwargs):
class SumoPrometheusScraperConfig:
    """Builds the voluptuous schemas that validate scraper configuration.

    The config file has a ``global`` section plus a list of ``targets``;
    the target schema extends the global one, so any global key may be
    overridden per target.
    """

    def __init__(self):
        # Keys accepted in the top-level "global" section.  Required keys
        # carry defaults, so they are filled in even when omitted.
        self.global_config_schema = Schema({
            Optional("sumo_http_url"):
            Url(),
            Required("run_interval_seconds", default=60):
            All(int, Range(min=1)),
            Required("target_threads", default=10):
            All(int, Range(min=1, max=50)),
            Required("batch_size", default=1000):
            All(int, Range(min=1)),
            Required("retries", default=5):
            All(int, Range(min=1, max=20)),
            Required("backoff_factor", default=0.2):
            All(float, Range(min=0)),
            "source_category":
            str,
            "source_host":
            str,
            "source_name":
            str,
            "dimensions":
            str,
            "metadata":
            str,
        })

        # A target source is either a plain URL or a service/namespace pair.
        self.target_source_config = Schema(
            Or(
                {Required("url"): Url()},
                {
                    Required("service"): str,
                    Required("namespace"): str
                },
            ))

        # NOTE(review): the first two Or() alternatives are a bare
        # Required("url") marker and a bare Url() validator rather than
        # dict schemas — confirm this matches the intended input shapes.
        url_schema = Schema(
            Or(
                Required("url"),
                Url(),
                {
                    Required("service"): str,
                    Required("namespace"): str,
                    Required("path", default="/metrics"): str,
                    Required("protocol", default="http"): str,
                },
            ))

        # Per-target schema: everything from global plus target-only keys.
        self.target_config_schema = self.global_config_schema.extend({
            Required("url", default={}):
            url_schema,
            Required("name"):
            str,
            Required("exclude_metrics", default=[]):
            list([str]),
            Required("include_metrics", default=[]):
            list([str]),
            Required("exclude_labels", default={}):
            Schema({}, extra=ALLOW_EXTRA),
            Required("include_labels", default={}):
            Schema({}, extra=ALLOW_EXTRA),
            Required("strip_labels", default=[]):
            list([str]),
            Required("should_callback", default=True):
            bool,
            "token_file_path":
            IsFile(),
            "verify":
            Any(Boolean(), str),
            # repeat keys from global to remove default values
            "sumo_http_url":
            Url(),
            "run_interval_seconds":
            All(int, Range(min=1)),
            "target_threads":
            All(int, Range(min=1, max=50)),
            "batch_size":
            All(int, Range(min=1)),
            "retries":
            All(int, Range(min=1, max=20)),
            "backoff_factor":
            All(float, Range(min=0)),
        })

        # Whole-file schema: a defaulted global section and 1-256 targets,
        # followed by the cross-field check that sumo_http_url exists
        # globally or on every target.
        self.config_schema = Schema(
            All(
                {
                    Required("global", default={}):
                    self.global_config_schema,
                    Required("targets"):
                    All(Length(min=1, max=256), [self.target_config_schema]),
                },
                self.check_url,
            ))

    @staticmethod
    def check_url(config):
        """Require sumo_http_url either globally or on every target."""
        if "global" in config:
            if "sumo_http_url" in config["global"]:
                return config

        for t in config["targets"]:
            if "sumo_http_url" not in t:
                raise Invalid("sumo_http_url must be set on target or global.")
        return config
コード例 #31
0
# Per-camera config schema: extends the base stream schema with identity,
# connection, motion/object detection, zone and logging settings.
# NOTE(review): "STREAM_SCEHMA" is presumably spelled this way at its
# definition site elsewhere in the file — keep the spelling in sync.
CAMERA_SCHEMA = STREAM_SCEHMA.extend(
    {
        Required("name"):
        All(str, Length(min=1)),
        Optional("mqtt_name", default=None):
        Any(All(str, Length(min=1)), None),
        Required("host"):
        All(str, Length(min=1)),
        Optional("username", default=None):
        Any(All(str, Length(min=1)), None),
        Optional("password", default=None):
        Any(All(str, Length(min=1)), None),
        Optional("global_args", default=CAMERA_GLOBAL_ARGS):
        list,
        # Optional secondary stream, validated like the main one.
        Optional("substream"):
        STREAM_SCEHMA,
        # Motion detection may be an options block or explicit null.
        Optional("motion_detection"):
        Any(
            {
                Optional("interval"):
                Any(int, float),
                Optional("trigger_detector"):
                bool,
                Optional("timeout"):
                bool,
                Optional("max_timeout"):
                int,
                Optional("width"):
                int,
                Optional("height"):
                int,
                # Fraction of the frame; bare 1 and 0 are accepted and
                # coerced to float.
                Optional("area"):
                All(
                    Any(All(float, Range(min=0.0, max=1.0)), 1, 0),
                    Coerce(float),
                ),
                Optional("threshold"):
                All(int, Range(min=0, max=255)),
                Optional("alpha"):
                All(
                    Any(All(float, Range(min=0.0, max=1.0)), 1, 0),
                    Coerce(float),
                ),
                Optional("frames"):
                int,
                # Polygons (lists of x/y points) masked out of detection.
                Optional("mask", default=[]): [{
                    Required("points"): [{
                        Required("x"): int,
                        Required("y"): int,
                    }],
                }],
                Optional("logging"):
                LOGGING_SCHEMA,
            },
            None,
        ),
        # Object detection may likewise be a block or explicit null.
        Optional("object_detection"):
        Any(
            {
                Optional("interval"): Any(int, float),
                Optional("labels"): LABELS_SCHEMA,
                Optional("logging"): LOGGING_SCHEMA,
                Optional("log_all_objects"): bool,
            },
            None,
        ),
        # Named polygon zones with optional per-zone label filters.
        Optional("zones", default=[]):
        [{
            Required("name"): str,
            Required("points"): [{
                Required("x"): int,
                Required("y"): int,
            }],
            Optional("labels"): LABELS_SCHEMA,
        }],
        Optional("publish_image", default=False):
        Any(True, False),
        # Passed straight through as ffmpeg's -loglevel value.
        Optional("ffmpeg_loglevel", default="fatal"):
        Any(
            "quiet",
            "panic",
            "fatal",
            "error",
            "warning",
            "info",
            "verbose",
            "debug",
            "trace",
        ),
        Optional("ffmpeg_recoverable_errors",
                 default=FFMPEG_RECOVERABLE_ERRORS): [str],
        # Extra MJPEG endpoints keyed by a slugified name.
        Optional("static_mjpeg_streams", default={}): {
            All(str, ensure_slug): MJPEG_STREAM_SCHEMA
        },
        Optional("logging"):
        LOGGING_SCHEMA,
    }, )
コード例 #32
0
    def setUp(self):
        """Build the release task graph and cache handles to the partner,
        EME-free and SHA-1 repack tasks used by the tests below."""
        # Task attributes common to each partner repack
        common_task_schema = Schema({
            'task': {
                'provisionerId': 'buildbot-bridge',
                'workerType': 'buildbot-bridge',
                'payload': {
                    'properties': {
                        'version': '42.0b2',
                        'build_number': 3,
                    }
                }
            }
        })

        # Each variant schema pins the repack_manifests_url it must use;
        # extra=True lets the real task carry additional keys.
        self.partner_task_schema = common_task_schema.extend(
            {
                'task': {
                    'payload': {
                        'properties': {
                            'repack_manifests_url':
                            '[email protected]:mozilla-partners/repack-manifests.git',
                        }
                    }
                }
            },
            required=True,
            extra=True)

        self.eme_free_task_schema = common_task_schema.extend(
            {
                'task': {
                    'payload': {
                        'properties': {
                            'repack_manifests_url':
                            'https://github.com/mozilla-partners/mozilla-EME-free-manifest',
                        }
                    }
                }
            },
            required=True,
            extra=True)

        self.sha1_task_schema = common_task_schema.extend(
            {
                'task': {
                    'payload': {
                        'properties': {
                            'repack_manifests_url':
                            'https://github.com/mozilla-partners/mozilla-sha1-manifest',
                        }
                    }
                }
            },
            required=True,
            extra=True)

        # Fixture kwargs: enable candidates/releases pushes, repacks for a
        # platform subset, and en-US / l10n build configs per platform.
        test_kwargs = create_firefox_test_args({
            'push_to_candidates_enabled':
            True,
            'push_to_releases_enabled':
            True,
            'push_to_releases_automatic':
            True,
            'source_enabled':
            True,
            'signing_pvt_key':
            PVT_KEY_FILE,
            'partner_repacks_platforms': ['win32', 'linux'],
            'eme_free_repacks_platforms': ['win32', 'macosx64'],
            'sha1_repacks_platforms': ['win32'],
            'release_channels': ['foo', 'bar'],
            'en_US_config': {
                "platforms": {
                    "linux": {
                        'signed_task_id': 'abc',
                        'unsigned_task_id': 'abc'
                    },
                    "macosx64": {
                        'signed_task_id': 'abc',
                        'unsigned_task_id': 'abc'
                    },
                    "win32": {
                        'signed_task_id': 'abc',
                        'unsigned_task_id': 'abc'
                    },
                }
            },
            'l10n_config': {
                "platforms": {
                    "win32": {
                        "en_us_binary_url":
                        "https://queue.taskcluster.net/something/firefox.exe",
                        "mar_tools_url":
                        "https://queue.taskcluster.net/something/",
                        "locales": ["de", "en-GB", "zh-TW"],
                        "chunks": 1,
                    },
                    "linux": {
                        "en_us_binary_url":
                        "https://queue.taskcluster.net/something/firefox.tar.xz",
                        "mar_tools_url":
                        "https://queue.taskcluster.net/something/",
                        "locales": ["de", "en-GB", "zh-TW"],
                        "chunks": 1,
                    },
                    "macosx64": {
                        "en_us_binary_url":
                        "https://queue.taskcluster.net/something/firefox.dmg",
                        "mar_tools_url":
                        "https://queue.taskcluster.net/something/",
                        "locales": ["de", "en-GB", "zh-TW"],
                        "chunks": 1,
                    },
                },
                "changesets": {
                    "de": "default",
                    "en-GB": "default",
                    "zh-TW": "default",
                },
            },
        })

        # Build the graph once and look up the tasks each test inspects.
        self.graph = make_task_graph(**test_kwargs)
        self.partner_tasks = [
            get_task_by_name(
                self.graph,
                "release-foo-firefox-{}_partner_repacks".format(platform))
            for platform in ["win32", "linux"]
        ]
        self.eme_free_tasks = [
            get_task_by_name(
                self.graph,
                "release-foo-firefox-{}_eme_free_repacks".format(platform))
            for platform in ["win32", "macosx64"]
        ]
        self.sha1_tasks = [
            get_task_by_name(
                self.graph,
                "release-foo-firefox-{}_sha1_repacks".format(platform))
            for platform in ["win32"]
        ]

        self.partner_push_to_mirrors_task = get_task_by_name(
            self.graph, "release-foo-firefox_partner_repacks_copy_to_releases")
        self.push_to_mirrors_task = get_task_by_name(
            self.graph, "release-foo_firefox_push_to_releases")

        # Beetmover tasks the push-to-mirrors tasks are expected to depend on.
        self.upstream_dependencies = [
            "release-foo_firefox_{}_complete_en-US_beetmover_candidates".
            format(platform) for platform in ["win32", "linux", "macosx64"]
        ] + [
            "release-foo_firefox_{}_l10n_repack_beetmover_candidates_1".format(
                platform) for platform in ["win32", "linux", "macosx64"]
        ]
コード例 #33
0
from dbt.node_types import NodeType

# Contract for a node exactly as it appears on disk, before parsing.
unparsed_base_contract = Schema({
    # identifiers
    Required('name'): All(basestring, Length(min=1, max=127)),
    Required('package_name'): basestring,

    # filesystem
    Required('root_path'): basestring,
    Required('path'): basestring,
    Required('original_file_path'): basestring,
    Required('raw_sql'): basestring,
})

# An unparsed node additionally declares which kind of resource it is.
unparsed_node_contract = unparsed_base_contract.extend({
    Required('resource_type'): Any(NodeType.Model, NodeType.Test,
                                   NodeType.Analysis, NodeType.Operation)
})

# A batch of unparsed nodes is simply a list of them.
unparsed_nodes_contract = Schema([unparsed_node_contract])


def validate_nodes(nodes):
    """Validate a list of unparsed nodes against the nodes contract."""
    validate_with(unparsed_nodes_contract, nodes)
コード例 #34
0
    if not value[-3:] == "csv":
        return False
    return True

# Base request schema.  extra=True allows unknown keys, so extensions
# (e.g. user_model's fileName) can add fields without failing here.
# NOTE(review): regex literals are not raw strings; "\d" relies on Python
# preserving unknown escapes — consider r"" strings file-wide.
base_model = Schema({
    # Fixed: this message previously read "Invalid contactId" (copy-paste
    # from another field) even though the key being validated is 'name'.
    Required('name'): All(Strip, str, Match("[a-zA-Z0-9 '-]+$"), Length(min=0, max=30), msg="Invalid name"),
    Required('role'): All(Strip, str, In(['Dev', 'Test', 'Support'], msg ="Invalid role")),
    Required('referenceId'): All(Strip, str, Match("[a-zA-Z0-9 '-]+$"), Length(min=0,max=30), msg = "Invalid referenceId"),
    Required('postalcode'): All(Strip, str, Match("^\d{5}\-\d{4}$|^\d{5}$"), Length(min=5,max=10), msg = "Invalid postalcode"),
    'payload': All(Strip, payload_validation, Length(min=0,max=1000), msg = "Invalid payload"),
    },
    extra = True
)

user_model = base_model.extend({
    # Fixed: in "[a-zA-Z0-9 '-_.]" the "'-_" was parsed as a character
    # RANGE from "'" (0x27) to "_" (0x5F), accidentally admitting
    # characters such as ; < = > ? @ [ ] ^.  The hyphen is now placed
    # last so it is literal, matching the evident intent: letters,
    # digits, space, apostrophe, underscore, dot and hyphen.
    Required('fileName'): All(Strip, str, Match("^[a-zA-Z0-9 '_.-]+$"), filename_validation, Length(min=0,max=100), msg = "Invalid fileName"),
})


# Example request exercising every user_model field, including the
# optional JSON-string payload and a ".csv" fileName.
request = {
    "name": "John",
    "role": "Dev",
    "referenceId": "reference123",
    "postalcode": "12345",
    "payload": "{\"phone_number\": \"9876543210\", \"email\": \"[email protected]\", \"date\": \"2001-01-01\"}",
    "fileName": "attachments.csv"
}

try:
    user_model(request)
    print("All validations success")