Ejemplo n.º 1
0
class Repository(SubResource):
    """A repository resource stored under its parent Organisation document.

    Linked to a repository-type Service via ``service_id``.  Approval and
    activation follow the restricted state machines declared below.
    (Python 2 code: note ``unicode`` and ``iteritems``.)
    """

    resource_type = 'repository'
    parent_resource = Organisation
    # Repositories are embedded under this key of the parent document.
    parent_key = 'repositories'
    read_only_fields = ['created_by']
    view = views.repositories
    active_view = views.active_repositories

    # Fields stripped from API output by clean().
    internal_fields = Document.internal_fields + ['pre_verified']

    # State transitions for repositories overridden so that:
    # - Cannot deactivate an approved repository
    # - Repository can move from approved to pending when moved to a new repository service
    approval_state_transitions = {
        None: [State.approved.name],
        State.pending.name: [State.approved.name, State.rejected.name]
    }

    state_transitions = {
        None: [State.pending.name],
        State.pending.name: [State.deactivated.name],
        State.approved.name: [State.pending.name],
        State.rejected.name: [State.deactivated.name],
        State.deactivated.name: [State.pending.name]
    }

    # Name-length bounds come from application options (configurable).
    _repository_name_length = Length(min=options.min_length_repository_name,
                                     max=options.max_length_repository_name)

    schema = Schema({
        Required('id', default=lambda: unicode(uuid.uuid4().hex)):
        unicode,
        Required('name'):
        All(unicode, _repository_name_length),
        Required('service_id'):
        unicode,
        Required('organisation_id'):
        unicode,
        Required('state', default=SubResource.default_state.name):
        validators.validate_state,
        Required('type', default=resource_type):
        resource_type,
        Required('created_by'):
        unicode,
        Required('permissions'):
        [Any(all_permission_schema, organisation_permission_schema)],
        'pre_verified':
        bool
    })

    def clean(self, user=None):
        """Remove internal fields.

        Permissions are additionally hidden unless ``user`` belongs to the
        owning organisation.

        :param user: optional User deciding permission visibility
        :returns: dict copy of the resource without internal fields
        """
        doc = self._resource
        # Copy so appending 'permissions' below never mutates the shared
        # class-level internal_fields list.
        internal_fields = deepcopy(self.internal_fields)
        if user is None or not user.is_user(self.organisation_id):
            internal_fields.append('permissions')

        result = {k: v for k, v in doc.iteritems() if k not in internal_fields}

        return result

    @property
    def default_permissions(self):
        """Default permission set: world-readable, owning org read/write."""
        default_permissions = [
            {
                'type': 'all',
                'value': None,
                'permission': 'r'
            },
        ]

        if 'organisation_id' in self._resource:
            default_permissions.append({
                'type': 'organisation_id',
                'value': self.organisation_id,
                'permission': 'rw'
            })

        return default_permissions

    @coroutine
    def validate(self):
        """Validate the resource.

        :raises exceptions.ValidationError: on schema, service or
            uniqueness failures.
        """
        if not self._resource.get('permissions'):
            self.permissions = self.default_permissions

        try:
            # update _resource so have default values from the schema
            self._resource = self.schema(self._resource)
        except MultipleInvalid as e:
            errors = [
                format_error(err, self.resource_type) for err in e.errors
            ]
            raise exceptions.ValidationError({'errors': errors})

        yield self.check_service()
        yield self.check_unique()

    @coroutine
    def check_service(self):
        """Check the service exists and is an approved repository service."""
        try:
            service = yield Service.get(self.service_id)
        except couch.NotFound:
            raise exceptions.ValidationError('Service {} not found'.format(
                self.service_id))

        if service.service_type != 'repository':
            raise exceptions.ValidationError(
                '{} is not a repository service'.format(self.service_id))

        if service.state != State.approved:
            raise exceptions.ValidationError(
                '{} is not an approved service'.format(self.service_id))

    @coroutine
    def check_unique(self):
        """Check the repository's name is unique."""
        result = yield views.repository_name.values(key=self.name)
        repo_id = getattr(self, 'id', None)
        # Ignore our own id (when updating an existing doc) and falsy rows.
        repos = {x for x in result if x != repo_id and x}

        if repos:
            raise exceptions.ValidationError(
                "Repository with name '{}' already exists".format(self.name))

    @coroutine
    def update(self, user, **kwargs):
        """Update the repository.

        Moving to a new service demotes the repository back to 'pending'
        unless the user is allowed to approve it (see can_approve).
        """
        service_id = kwargs.get('service_id')
        if service_id:
            can_approve = yield self.can_approve(user, **kwargs)
            if not can_approve:
                kwargs['state'] = State.pending.name

        yield super(Repository, self).update(user, **kwargs)

    @coroutine
    def can_approve(self, user, **data):
        """
        Admins of repository service or sys admins can approve a repository
        :param user: a User
        :param data: data that the user wants to update
        """
        service_id = data.get('service_id', self.service_id)

        try:
            service = yield Service.get(service_id)

            is_repo_admin = user.is_org_admin(service.organisation_id)

            is_reseller_preverifying = user.is_reseller() and data.get(
                'pre_verified', False)

            raise Return(is_repo_admin or is_reseller_preverifying)
        except couch.NotFound:
            # Unknown service: fall through and deny approval.
            pass

        raise Return(False)

    @coroutine
    def can_update(self, user, **kwargs):
        """
        Sys admin's can change anything

        If the user is an organisation administrator or created the repository,
        they may change any field other than "organisation_id"

        If the user is a service administrator the user may change the "state"
        but no other fields.

        :returns: (allowed, offending_field_names) tuple via Return
        """
        if user.is_admin():
            raise Return((True, set([])))

        is_creator = self.created_by == user.id
        if user.is_org_admin(self.organisation_id) or is_creator:
            fields = set([])
            if 'organisation_id' in kwargs:
                fields.add('organisation_id')

            if fields:
                raise Return((False, fields))
            else:
                raise Return((True, set([])))

        try:
            service = yield Service.get(self.service_id)

            if user.is_org_admin(service.organisation_id):
                fields = set(kwargs) - {'state'}
                if fields:
                    raise Return((False, fields))
                else:
                    raise Return((True, fields))
        except couch.NotFound:
            # will be handled in Repository.validate
            pass

        raise Return((False, set([])))

    @classmethod
    @coroutine
    def can_create(cls, user, **kwargs):
        """Organisation users, or resellers pre-verifying, may create."""
        return user.is_user(kwargs.get('organisation_id')) or (
            user.is_reseller() and kwargs.get('pre_verified', False))

    @coroutine
    def with_relations(self, user=None):
        """
        Return a cleaned dictionary including relations

        :param user: optional User controlling permission visibility
        :returns: dict with embedded 'organisation' and 'service' entries
            (falling back to bare ``{'id': ...}`` stubs when a relation
            cannot be found)
        """
        repository = self.clean(user=user)
        try:
            parent = yield self.get_parent()
            repository['organisation'] = parent.clean()
        except couch.NotFound:
            parent = None
            repository['organisation'] = {'id': self.parent_id}

        service_id = self.service_id
        try:
            # TODO: cache this lookup
            service = yield Service.get(service_id)
            repository['service'] = service.clean(user=user)
        except couch.NotFound:
            # just include the service ID if cannot find the service
            repository['service'] = {'id': service_id}

        del repository['service_id']
        del repository['organisation_id']

        raise Return(repository)
Ejemplo n.º 2
0
         'locales': Any([str], [None]),
         'branch': str,
         'platform': Any(str, None),
         'version': str,
         'revision': str,
         'build_number': int,
         Extra: object,
     },
     'signing': {
         'signature': str,
     },
     Extra: object,
 },
 Required('metadata',
          msg="Required for TaskCluster schema."): {
     'name': All(str, Length(max=255)),
     'description': All(str, Length(max=32768)),
     'owner': All(Email(), Length(max=255)),
     'source': All(Url(), Length(max=4096)),
 },
 Required('payload',
          msg="Required for TaskCluster schema."): {
     Extra: object,
     Optional('properties'): {
         'version': str,
         'build_number': int,
         'release_promotion': bool,
         'revision': str,
         'product': str,
         Extra: object,
     }
Ejemplo n.º 3
0
    def validate(self, tipo_doc, data):
        """Validate an electronic-invoice payload against the document schema.

        :param tipo_doc: document type code ('03' boleta, '07'/'08' notas
            de credito/debito; otherwise a factura-style document).
        :param data: dict with 'documento', 'detalle', 'impuesto', etc.
        :returns: the validated (and coerced) data.
        :raises MultipleInvalid: when the payload does not match the schema.
        """
        doc = Schema({
            Required('serie'):
            All(str, Length(min=4, max=4)),
            Required('correlativo'):
            All(str, Length(min=1, max=8)),
            Required('nombreEmisor'):
            All(str, Length(min=1, max=100)),
            Required('tipoDocEmisor'):
            All(str,
                Length(min=1, max=2),
                msg='El tipo de Doc. Emisor debe '
                'tener un tamaño entre 1 y 2'),
            Required('numDocEmisor'):
            All(str, Length(min=1, max=25)),
            'direccionOrigen':
            All(str, Length(min=1, max=100)),
            'direccionUbigeo':
            All(str, Length(min=6, max=6)),
            Required('tipoDocReceptor'):
            All(str, Length(min=1, max=2)),
            Required('numDocReceptor'):
            All(str, Length(min=1, max=25)),
            Required('nombreReceptor'):
            All(str, Length(min=1, max=100)),
            # TODO: Verificar si hay problemas en el orden
            Required('tipoMoneda'):
            All(str, Length(min=3, max=3)),
            'mntNeto':
            Coerce(float),
            'mntTotalIgv':
            Coerce(float),
            'mntTotal':
            Coerce(float),
            'fechaVencimiento':
            All(str, Length(min=10, max=10)),
            'tipoFormatoRepresentacionImpresa':
            All(str, Length(min=1, max=100)),
        })

        # BUG FIX: was `tipo_doc in '03'`, a substring test that also matched
        # '0', '3' and '' -- compare for equality like the tuple tests below.
        if tipo_doc == '03':
            # Boletas
            doc = doc.extend({
                'direccionDestino':
                All(str, Length(min=1, max=100)),
            })
        if tipo_doc in ('07', '08'):
            # Nota Crédito
            doc = doc.extend({
                Required('sustento'):
                All(str, Length(min=1, max=100)),
                Required('tipoMotivoNotaModificatoria'):
                All(str, Length(min=2, max=2))
            })

        # Per-line tax entries.
        impuesto = Schema(
            All([{
                'codImpuesto': All(str, Length(min=1, max=4)),
                'montoImpuesto': Coerce(float),
                'tasaImpuesto': Coerce(float),
            }]))
        # Invoice line items; at least one is mandatory.
        detalle = Schema(
            All(
                [{
                    Required('cantidadItem'):
                    Coerce(float),
                    Required('unidadMedidaItem'):
                    All(str, Length(min=1, max=3)),
                    'codItem':
                    All(str, Length(min=1, max=30)),
                    Required('nombreItem'):
                    All(str, Length(min=1, max=250)),
                    # TODO: No debe ser obligatorio para Notas
                    Required('precioItem'):
                    Coerce(float),
                    Required('precioItemSinIgv'):
                    Coerce(float),
                    Required('montoItem'):
                    Coerce(float),
                    # TODO-FIN
                    'descuentoMonto':
                    Coerce(float),
                    Required('codAfectacionIgv'):
                    All(str, Length(min=2, max=2)),
                    'tasaIgv':
                    Coerce(float),
                    'montoIgv':
                    Coerce(float),
                    Required('idOperacion'):
                    All(str, Length(min=1, max=80))
                }],
                Length(min=1)))
        descuento = Schema(All({
            'mntTotalDescuentos': Coerce(float),
        }))

        schema = Schema({
            Required('documento'):
            doc,
            Required('tipoDocumento'):
            All(str, Length(min=2, max=2)),
            Required('fechaEmision'):
            All(str, Length(min=10, max=10)),
            Required('idTransaccion'):
            All(str, Length(min=1)),
            'correoReceptor':
            str,
            Required('impuesto'):
            impuesto,
            Required('detalle'):
            detalle,
            'descuento':
            descuento,
        })
        # Credit/debit notes must reference the document being modified.
        if tipo_doc in ('07', '08'):
            referencia = Schema(
                All([{
                    'tipoDocumentoRef': All(str, Length(min=1, max=2)),
                    'serieRef': All(str, Length(min=4, max=4)),
                    'correlativoRef': All(str, Length(min=1, max=8)),
                    'fechaEmisionRef': All(str, Length(min=10, max=10)),
                }]))

            schema = schema.extend({
                'referencia': referencia,
            })
        return schema(data)
Ejemplo n.º 4
0
#-*- coding: utf-8 -*-
from tornado.httputil import url_concat
from voluptuous import Schema, Length, Any

from autumn.torn.form import Form
from .. import BaseHandler
from .. import require
from autumn.torn.paginator import Paginator

# Validation rules for the notice form: title/content must be non-empty,
# action is restricted to edit/add, and unknown form fields pass through
# (extra=True).
_notice_fields = {
    'title': Length(min=1),
    'content': Length(min=1),
    'id': str,
    'action': Any('edit', 'add'),
}
notice_schema = Schema(_notice_fields, extra=True)


class Show(BaseHandler):
    """Admin listing of notices (news rows of type 1 that are not deleted)."""

    def get(self):
        """Render the paginated notice list."""
        notice_form = Form(self.request.arguments, notice_schema)
        query = 'select * from news where deleted = 0 and type = 1 order by created_at desc'
        listing = Paginator(self, query, [])
        self.render('admin/notice_list.html', form=notice_form, page=listing)


class Add(BaseHandler):
    @require('admin')
Ejemplo n.º 5
0
def job(extra_context_variables=None):
    """Build the voluptuous schema describing a LAVA job definition.

    :param extra_context_variables: optional list of additional context
        variable names accepted in job/role "context" blocks, appended to
        the module-level CONTEXT_VARIABLES.
    :returns: an ``All(...)`` voluptuous validator for a job dict.
    """
    # FIX: the default was a mutable list literal (shared across calls);
    # use a None sentinel and normalise to an empty list instead.
    context_variables = CONTEXT_VARIABLES + (extra_context_variables or [])
    # Container description accepted by the lava-lxc protocol.
    lava_lxc = {
        Required("name"): str,
        Required("distribution"): str,
        Required("release"): str,
        Optional("arch"): str,
        Optional("mirror"): str,
        Optional("persist"): bool,
        Optional("security_mirror"): str,
        Optional("template"): str,
        Optional("timeout"): timeout(),
        Optional("verbose"): bool,
    }

    return All(
        {
            Required("job_name"):
            All(str, Length(min=1, max=200)),
            Optional("device_type"):
            All(str, Length(min=1, max=200)),
            Required("timeouts"): {
                Required("job"): timeout(),
                Optional("action"): timeout(),
                Optional("actions"): {
                    str: timeout()
                },
                Optional("connection"): timeout(),
                Optional("connections"): {
                    str: timeout()
                },
            },
            Required("visibility"):
            Any("public", "personal", {"group": [str]}),
            Optional("context"):
            Schema({In(context_variables): Any(int, str, [int, str])},
                   extra=False),
            Optional("metadata"): {
                str: object
            },
            Optional("priority"):
            Any("high", "medium", "low", Range(min=0, max=100)),
            Optional("tags"): [str],
            Optional("secrets"):
            dict,
            Optional("environment"):
            dict,
            Optional("protocols"): {
                Optional("lava-lxc"):
                Any(lava_lxc, {str: lava_lxc}),
                Optional("lava-multinode"): {
                    Required("roles"): {
                        str:
                        Any(
                            {
                                Required("device_type"):
                                str,
                                Required("count"):
                                Range(min=0),
                                Optional("context"):
                                Schema(
                                    {
                                        In(context_variables):
                                        Any(int, str, [int, str])
                                    },
                                    extra=False,
                                ),
                                Optional("tags"): [str],
                                Optional("environment"):
                                dict,
                                Optional("timeout"):
                                timeout(),
                            },
                            {
                                Required("connection"):
                                str,
                                Required("count"):
                                Range(min=0),
                                Required("expect_role"):
                                str,
                                Required("host_role"):
                                str,
                                Optional("request"):
                                str,
                                Optional("tags"): [str],
                                Optional("timeout"):
                                timeout(),
                                Optional("context"):
                                Schema(
                                    {
                                        In(context_variables):
                                        Any(int, str, [int, str])
                                    },
                                    extra=False,
                                ),
                            },
                        )
                    },
                    Optional("timeout"): timeout(),
                },
                Optional("lava-vland"):
                Any(
                    {str: {
                        str: {
                            Required("tags"): [str]
                        }
                    }},
                    {str: {
                        Required("tags"): [str]
                    }},
                ),
                Optional("lava-xnbd"): {
                    Required("port"): Any("auto", int),
                    Optional("timeout"): timeout(),
                },
            },
            Optional("notify"):
            notify(),
            Optional("reboot_to_fastboot"):
            bool,
            Required("actions"): [{
                Any("boot", "command", "deploy", "test"):
                dict
            }],
        },
        extra_checks,
    )
Ejemplo n.º 6
0

def custom_schema():
    """Build a schema fragment mapping each configured column tag to a
    list requirement.

    Only the keys of ``ops_settings['column_mappings']`` matter here; the
    per-tag settings are not needed for the schema shape.
    """
    # Was a loop over .items() that ignored the values; iterate keys only.
    return {
        tag: Required(list, msg='Must be a list')
        for tag in ops_settings['column_mappings']
    }


# Schema fragment for the accepted geometry inputs.  Every key is optional,
# but when present its value is validated (NOTE(review): `Required` is used
# here as a value wrapper to attach an error message -- confirm this matches
# the project's voluptuous usage elsewhere).
geom_schema = {
    # Free-form geojson payload; only presence/type is enforced here.
    Optional('geojson'):
    Required(object, msg='Must be a geojson object'),
    # Bounding box: exactly two elements (presumably two corner points).
    Optional('bbox'):
    Required(All(list, Length(min=2, max=2)),
             msg='Must be length of {}'.format(2)),
    # Search radius, bounded by the configured maximum.
    Optional('buffer'):
    Required(All(
        Coerce(int),
        Range(min=0, max=ops_settings['maximum_search_radius_for_points'])),
             msg='Must be between 1 and {}'.format(
                 ops_settings['maximum_search_radius_for_points']))
}

filters_schema = {
    Optional('category_group_ids'):
    Required(All(categories_tools.category_group_ids,
                 Length(max=ops_settings['maximum_categories'])),
             msg='Must be one of {} and have a maximum amount of {}'.format(
                 categories_tools.category_group_ids,
Ejemplo n.º 7
0
"""
API functions relating to team management.
"""

import api
from api.annotations import log_action
from api.common import (check, InternalException, safe_fail,
                        SevereInternalException, validate, WebException)
from voluptuous import Length, Required, Schema

# Registration schema for a new team.  Each check() pairs a user-facing
# error message with its validators; extra=True lets unrelated form fields
# pass through unvalidated.
new_team_schema = Schema(
    {
        Required("team_name"):
        check(
            ("The team name must be between 3 and 40 characters.",
             [str, Length(min=3, max=40)]),
            # Team names must be unique across both teams and users.
            ("A team with that name already exists.",
             [lambda name: safe_fail(api.team.get_team, name=name) is None]),
            ("A username with that name already exists.",
             [lambda name: safe_fail(api.user.get_user, name=name) is None]),
        ),
        Required("team_password"):
        check(("Passwords must be between 3 and 20 characters.",
               [str, Length(min=3, max=20)]))
    },
    extra=True)

join_team_schema = Schema(
    {
        Required("team_name"):
        check(("The team name must be between 3 and 40 characters.",
Ejemplo n.º 8
0
        },
        Optional("connections"): {
            All(action_name): _timeout_schema()
        },
    })


def visibility_schema():
    """Schema for job visibility: "public", "personal", or a group dict."""
    allowed = Any("public", "personal", {"group": [str]})
    return Schema(allowed)


# Top-level schema for a LAVA job submission; nested sections are built by
# the helper schema factories referenced below.
_job_schema = Schema({
    "device_type":
    All(str,
        Length(min=1)),  # not Required as some protocols encode it elsewhere
    Required("job_name"): All(str, Length(min=1, max=200)),
    Optional("priority"): Any("high", "medium", "low", int),
    Optional("protocols"): _job_protocols_schema(),
    Optional("context"): _context_schema(),
    Optional("metadata"): All({Any(str, int): Any(str, int)}),
    Optional("secrets"): dict,
    Optional("environment"): dict,
    Optional("tags"): [str],
    Required("visibility"): visibility_schema(),
    Required("timeouts"): _job_timeout_schema(),
    Required("actions"): _job_actions_schema(),
    Optional("notify"): _job_notify_schema(),
    Optional("reboot_to_fastboot"): bool,
})
Ejemplo n.º 9
0
    S3FileSystem,
    Schemes,
    get_cloud_fs,
)
from .utils import relpath
from .utils.fs import path_isin

if TYPE_CHECKING:
    from dvc_objects.db import ObjectDB

logger = logging.getLogger(__name__)


# Checksum value normalisation: None stays None, an empty string becomes
# None (SetTo), otherwise ints are coerced to str, a minimum length of 3 is
# enforced, and the value is lower-cased for case-insensitive comparison.
CHECKSUM_SCHEMA = Any(
    None,
    And(str, Length(max=0), SetTo(None)),
    And(Any(str, And(int, Coerce(str))), Length(min=3), Lower),
)

# Same normalisation but preserving case for case-sensitive checksum types.
CASE_SENSITIVE_CHECKSUM_SCHEMA = Any(
    None,
    And(str, Length(max=0), SetTo(None)),
    And(Any(str, And(int, Coerce(str))), Length(min=3)),
)

# NOTE: currently there are only 3 possible checksum names:
#
#    1) md5 (LOCAL, SSH);
#    2) etag (S3, GS, OSS, AZURE, HTTP);
#    3) checksum (HDFS);
#
Ejemplo n.º 10
0
# Chunking location: either projected x/y or geographic latitude/longitude.
# Inclusive ties each pair together -- both keys of a group must be present
# or both absent.
computation_schema = Schema({
    Inclusive('x', 'proj'): Any(float, int),
    Inclusive('y', 'proj'): Any(float, int),
    Inclusive('latitude', 'geo'): Any(float, int),
    Inclusive('longitude', 'geo'): Any(float, int)
})

stats_schema = Schema(
    {
        'date_ranges':
        date_ranges_schema,
        'location':
        str,
        'sources':
        All([source_schema], Length(min=1)),
        'storage':
        storage_schema,
        'output_products':
        All([output_product_schema], Length(min=1)),
        Optional('computation'): {
            'chunking': computation_schema
        },
        Optional('input_region'):
        Any(single_tile, tile_list, from_file, geometry, boundary_coords),
        Optional('global_attributes'):
        dict,
        Optional('var_attributes'): {
            str: {
                str: str
            }
Ejemplo n.º 11
0
from voluptuous import Schema, All, Required, Optional, Length, Boolean

# Payload schema for data sources: the modify flag and a length-bounded
# name are mandatory; the numeric id is optional (absent on creation).
data_source_schema = Schema({
    Required('ismodify'): Boolean(),
    Optional('datasourceid'): int,
    Required('datasourcename'): All(str, Length(max=200)),
})
Ejemplo n.º 12
0
import api
import bcrypt
from api.annotations import log_action
from api.common import InternalException, safe_fail, validate, WebException
from api.user import check
from flask import session
from voluptuous import Length, Required, Schema

log = api.logger.use(__name__)

debug_disable_general_login = False

# Login form validation: both credentials are length-bounded; check() wraps
# each rule list with a user-facing error message.
user_login_schema = Schema({
    Required('username'): check(
        ("Usernames must be between 3 and 50 characters.", [str, Length(min=3, max=50)]),
    ),
    Required('password'): check(
        ("Passwords must be between 3 and 50 characters.", [str, Length(min=3, max=50)])
    )
})

def confirm_password(attempt, password_hash):
    """
    Verifies the password attempt

    Args:
        attempt: the plaintext password attempt
        password_hash: the real password hash (bytes, bcrypt format)

    Returns:
        True when the attempt matches the stored hash.
    """
    import hmac

    # Re-hash the attempt using the stored hash as salt, then compare in
    # constant time to avoid leaking timing information (was a plain ==).
    return hmac.compare_digest(
        bcrypt.hashpw(attempt.encode('utf-8'), password_hash), password_hash)
Ejemplo n.º 13
0
        try:
            name = function.__name__ + '_validator'  # find validator name
            globals()[name](kwargs)  # call validation function
            return function(*args, **kwargs)
        except KeyError:
            raise Exception("Could not find validation schema for the"
                            " function " + function.__name__)

    return wrapper


# Payload validators for the user endpoints (Python 2: ``basestring``).
# Create and update currently share the same shape; only 'user_id' is
# mandatory.
create_user_validator = Schema({
    Required('user_id'): basestring,
    'roles': [Any('user', 'superuser')],
    'netmask': basestring,
    'secret': All(basestring, Length(min=8, max=64)),
    'pubkey': basestring
})

update_user_validator = Schema({
    Required('user_id'): basestring,
    'roles': [Any('user', 'superuser')],
    'netmask': basestring,
    'secret': All(basestring, Length(min=8, max=64)),
    'pubkey': basestring
})

create_pos_validator = Schema({
    Required('name'): basestring,
    Required('pos_type'): basestring,
    Required('pos_id'): basestring,
Ejemplo n.º 14
0
    def schema(self):
        """Build the validation schema for an organisation document.

        Nested 'services' and 'repositories' reuse the Service/Repository
        schemas with 'id'/'organisation_id' filtered out, since those are
        implied by the parent document.  (Python 2 code: ``unicode``.)

        :returns: a voluptuous Schema instance
        """
        name_length = Length(min=options.min_length_organisation_name,
                             max=options.max_length_organisation_name)

        # Keys implied by the embedding organisation, removed from nested
        # sub-resource schemas.
        filtered_fields = ['id', 'organisation_id']

        schema = Schema({
            Required('name'):
            All(unicode, name_length),
            Required('state', default=self.default_state.name):
            validators.validate_state,
            Required('created_by'):
            unicode,
            Required('type', default=self.resource_type):
            self.resource_type,
            Required('star_rating', default=0):
            Range(0, 5),
            Required('services', default={}): {
                Extra: partial_schema(Service.schema, filtered_fields)
            },
            Required('repositories', default={}): {
                Extra: partial_schema(Repository.schema, filtered_fields)
            },
            # CouchDB bookkeeping fields.
            '_id':
            unicode,
            '_rev':
            unicode,
            'doc_version':
            unicode,
            'description':
            unicode,
            'address':
            unicode,
            'email':
            Any(validators.valid_email, ''),
            'phone':
            unicode,
            # Web/social-media presence; free-form unicode links.
            'website':
            unicode,
            'facebook':
            unicode,
            'twitter':
            unicode,
            'google_plus':
            unicode,
            'instagram':
            unicode,
            'youtube':
            unicode,
            'linkedin':
            unicode,
            'myspace':
            unicode,
            'payment': {
                'url': Any(validators.validate_url, ''),
                'source_id_type': unicode
            },
            'reference_links':
            validators.validate_reference_links,
            'logo':
            Any(validators.validate_url, '', None),
            # Branding colours as hex strings.
            'primary_color':
            validators.validate_hex,
            'secondary_color':
            validators.validate_hex,
            'modal_header_text':
            unicode,
            'modal_footer_text':
            unicode,
            'modal_link_text':
            unicode,
            'modal_link_url':
            Any(validators.validate_url, '', None),
            'pre_verified':
            bool
        })

        return schema
Ejemplo n.º 15
0
def test_fix_157():
    """Regression test for voluptuous issue #157: a Length constraint on a
    list must be combined with the element schema inside All()."""
    # FIX: Length(min=1) was passed as Schema's second positional argument
    # (the `required` flag), so the length check never ran; it belongs
    # inside All() alongside the list validator.
    s = Schema(All([Any('one', 'two', 'three')], Length(min=1)))
    assert_equal(['one'], s(['one']))
    assert_raises(MultipleInvalid, s, ['four'])
Ejemplo n.º 16
0
    return f


def POSIX(msg=None):
    """Return a validator that parses a POSIX timestamp into a datetime.

    :param msg: optional custom error message used on failure.
    :returns: callable raising Invalid when the input is not a timestamp.
    """
    def f(i):
        try:
            return dt.fromtimestamp(int(i))
        except Exception:
            # FIX: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit.
            raise Invalid(msg or ("Invalid date format."))

    return f


# Registration payload: the username must not look like an email address so
# it cannot collide with email-based identification.
newUserSchema = Schema({
    'username':
    All(str, Length(min=4, msg="Username must be at least 4 characters."),
        NotEmail("Username cannot be an email address.")),
    'password':
    All(str, Length(min=8, msg="Password must be at least 8 characters.")),
    'email':
    All(str, Email()),
})

# General user document: same core rules plus account flags.
userSchema = Schema({
    'username': All(str, Length(min=4), NotEmail()),
    'password': All(str, Length(min=8)),
    'email': All(str, Email()),
    'active': bool,
    'sms_number': int
})
Ejemplo n.º 17
0
# Percentile aggregates ('1pct' .. '99pct') supported on top of the basics.
EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY = set(
    (str(i) + 'pct' for i in range(1, 100)))

# Add a 'rate:' variant of every percentile.  Iterate over a snapshot
# (list(...)) because the set is mutated inside the loop.
for agg in list(EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY):
    EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY.add("rate:%s" % agg)

# The aggregation method that one can use to configure the archive
# policies also supports the 'pct' (percentile) operation. Therefore,
# we also expose this as a configuration.
VALID_AGGREGATION_METHODS_FOR_METRICS = BASIC_AGGREGATION_METHODS.union(
    EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY)

GNOCCHI_EXTRA_SCHEMA = {
    Required('extra_args'): {
        Required('resource_type'):
        All(str, Length(min=1)),
        # Due to Gnocchi model, metric are grouped by resource.
        # This parameter permits to adapt the key of the resource identifier
        Required('resource_key', default='id'):
        All(str, Length(min=1)),
        Required('aggregation_method', default='max'):
        In(VALID_AGGREGATION_METHODS_FOR_METRICS),
        Required('re_aggregation_method', default='max'):
        In(BASIC_AGGREGATION_METHODS),
        Required('force_granularity', default=3600):
        All(int, Range(min=0)),
        Required('use_all_resource_revisions', default=True):
        All(bool),
        # Provide means for operators to customize the aggregation query
        # executed against Gnocchi. By default we use the following:
        #
Ejemplo n.º 18
0
class ElasticsearchBucket(Bucket):
    """
    Elasticsearch bucket

    Stores time-indexed documents in an Elasticsearch index.  The client
    connection is created lazily on first access to ``es``.
    """

    # Configuration schema: extends the generic Bucket schema with the
    # Elasticsearch connection, authentication, TLS and index settings.
    SCHEMA = Bucket.SCHEMA.extend({
        Required('addr'):
        str,
        Required('index'):
        str,
        Optional('doc_type', default='doc'):
        str,
        'routing':
        str,
        Optional('dbuser'):
        All(schemas.key, Length(max=256)),
        Optional('dbuser_password'):
        str,
        Optional('ca_certs'):
        IsFile(),
        Optional('client_cert'):
        IsFile(),
        Optional('client_key'):
        IsFile(),
        Optional('use_ssl', default=False):
        Boolean(),
        Optional('verify_ssl', default=False):
        Boolean(),
        Optional('number_of_shards', default=1):
        All(int, Range(min=1)),
        Optional('number_of_replicas', default=0):
        All(int, Range(min=0)),
    })

    def __init__(self, cfg):
        """Initialize the bucket; no connection is opened yet."""
        cfg['type'] = 'elasticsearch'
        super().__init__(cfg)
        # Lazily-created Elasticsearch client (see the ``es`` property).
        self._es = None
        # Indices written to since the last refresh(); see refresh().
        self._touched_indices = []

    @property
    def number_of_shards(self):
        """Primary shard count for newly created indices (default 1)."""
        return int(self.cfg.get('number_of_shards') or 1)

    @property
    def number_of_replicas(self):
        """Replica count for newly created indices (default 0)."""
        return int(self.cfg.get('number_of_replicas') or 0)

    @property
    def addr(self):
        """Elasticsearch address (parsed with a default port of 9200)."""
        return self.cfg['addr']

    @property
    def index(self):
        """Target index name; may contain '*' (see get_index_name())."""
        return self.cfg['index']

    @property
    def doc_type(self):
        """Document type used for mappings and writes."""
        return self.cfg['doc_type']

    @property
    def timeout(self):
        """Client timeout in seconds (default 30)."""
        return self.cfg.get('timeout', 30)

    @property
    def dbuser(self):
        """Optional user name for HTTP basic auth."""
        return self.cfg.get('dbuser')

    @property
    def dbuser_password(self):
        """Password matching ``dbuser``."""
        return self.cfg.get('dbuser_password')

    @property
    def use_ssl(self):
        """Whether to use TLS when connecting (default False)."""
        return self.cfg.get('use_ssl') or False

    @property
    def verify_ssl(self):
        """Whether to verify the server certificate (default False)."""
        return self.cfg.get('verify_ssl') or False

    @property
    def ca_certs(self):
        """Optional path to a CA bundle."""
        return self.cfg.get('ca_certs')

    @property
    def client_cert(self):
        """Optional path to a client certificate."""
        return self.cfg.get('client_cert')

    @property
    def client_key(self):
        """Optional path to the client certificate key."""
        return self.cfg.get('client_key')

    @property
    def es(self):
        """Elasticsearch client, created on first access."""
        if self._es is None:
            addr = parse_addr(self.addr, default_port=9200)
            logging.info('connecting to elasticsearch on %s:%d', addr['host'],
                         addr['port'])
            self._es = Elasticsearch(
                [addr],
                timeout=self.timeout,
                http_auth=(self.dbuser,
                           self.dbuser_password) if self.dbuser else None,
                use_ssl=self.use_ssl,
                verify_certs=self.verify_ssl,
                ca_certs=self.ca_certs,
                client_cert=self.client_cert,
                client_key=self.client_key,
            )

        # urllib3 & elasticsearch modules log exceptions, even if they are
        # caught! Disable this.
        # (Note: the levels are re-applied on every access, not just once.)
        urllib_logger = logging.getLogger('urllib3')
        urllib_logger.setLevel(logging.CRITICAL)
        es_logger = logging.getLogger('elasticsearch')
        es_logger.setLevel(logging.CRITICAL)

        return self._es

    def init(self, data_schema=None, *args, **kwargs):
        """
        Create index and write mapping
        """
        if data_schema and self.timestamp_field:
            # Timestamps are stored as epoch milliseconds.
            data_schema[self.timestamp_field] = {
                "type": "date",
                "format": "epoch_millis",
            }
        if data_schema:
            info = self.es.info()
            mapping = {"properties": data_schema}
            if not self.es.indices.exists(index=self.index, ):
                params = {}
                # ES >= 7 removed mapping types; include_type_name keeps the
                # typed mapping body below accepted by newer servers.
                if version(info['version']['number']) >= version('7.0.0'):
                    params['include_type_name'] = 'true'
                mappings = {
                    'mappings': {
                        self.doc_type: {
                            "properties": data_schema
                        }
                    },
                    'settings': {
                        "number_of_shards": self.number_of_shards,
                        "number_of_replicas": self.number_of_replicas,
                        "codec": "best_compression",
                    }
                }
                self.es.indices.create(
                    index=self.index,
                    body=mappings,
                    params=params,
                )
            params = {
                'allow_no_indices': 'true',
                'ignore_unavailable': 'true',
            }
            if version(info['version']['number']) >= version('7.0.0'):
                params['include_type_name'] = 'true'

            # The mapping is pushed even when the index already existed,
            # so newly added fields reach existing indices too.
            self.es.indices.put_mapping(
                doc_type=self.doc_type,
                body=mapping,
                index=self.index,
                params=params,
            )

    def drop(self, index=None):
        """
        Delete index
        """
        if index is None:
            index = self.index
        # ignore=404: dropping a non-existent index is not an error.
        self.es.indices.delete(index, ignore=404)

    def send_bulk(self, requests):
        """
        Send data to Elasticsearch
        """
        logging.info("commit %d change(s) to elasticsearch", len(requests))

        try:
            helpers.bulk(
                self.es,
                requests,
                chunk_size=5000,
                timeout="30s",
            )
        except (
                urllib3.exceptions.HTTPError,
                elasticsearch.exceptions.TransportError,
        ) as exn:
            # Normalize transport-level failures into the app's own error type.
            raise errors.TransportError(str(exn))

    def refresh(self, index=None):
        """
        Explicitely refresh index
        """

        if index is None:
            # Refresh (and forget) every index written since the last call.
            indices = self._touched_indices
            self._touched_indices = []
        else:
            indices = [index]

        for i in indices:
            self.es.indices.refresh(i)

    def get_index_name(self, index=None, timestamp=None):
        """
        Build index name
        """

        if index is None:
            index = self.index

        if '*' in index:
            # Daily index pattern: '*' is replaced with YYYY.MM.DD taken
            # from the given timestamp, or from the current time.
            if timestamp is None:
                dt = datetime.datetime.now()
            else:
                dt = datetime.datetime.fromtimestamp(timestamp)

            index = index.replace('*', dt.strftime("%Y.%m.%d"))

        return index

    def insert_data(
        self,
        data,
        index=None,
        doc_type=None,
        doc_id=None,
        timestamp=None,
    ):
        """
        Insert entry into the index
        """

        index = self.get_index_name(index, timestamp)

        req = {
            '_index': index,
            '_type': doc_type or self.doc_type,
            '_source': data,
        }

        if doc_id is not None:
            req['_id'] = doc_id

        # Buffered via the base class' enqueue(); presumably flushed
        # through send_bulk() -- confirm in Bucket.
        self.enqueue(req)
        self._touched_indices.append(index)

    def insert_times_data(self,
                          ts,
                          data,
                          tags=None,
                          index=None,
                          doc_type=None,
                          doc_id=None,
                          *args,
                          **kwargs):
        """
        Insert time-indexed entry
        """
        ts = make_ts(ts)

        # Stored in milliseconds, matching the epoch_millis mapping in init().
        data[self.timestamp_field] = ts_to_ms(ts)

        if tags is not None:
            # Tags are flattened into the document itself.
            for tag, tag_val in tags.items():
                data[tag] = tag_val

        self.insert_data(
            data,
            index=index or self.index,
            doc_type=doc_type or self.doc_type,
            doc_id=doc_id,
            timestamp=int(ts),
        )

    def search(self, body, index=None, routing=None, doc_type=None, size=0):
        """
        Send search query to Elasticsearch
        """

        if index is None:
            index = self.index

        params = {}
        if routing is not None:
            params['routing'] = routing

        try:
            return self.es.search(
                index=index,
                doc_type=doc_type or self.doc_type,
                size=size,
                body=body,
                params=params,
            )
        except elasticsearch.exceptions.TransportError as exn:
            raise errors.TransportError(str(exn))
        except urllib3.exceptions.HTTPError as exn:
            raise errors.BucketError(self.name, str(exn))

    @staticmethod
    def _build_aggs(features):
        """
        Build Elasticsearch aggregations
        """

        aggs = {}

        for feature in features:
            # Normalize metric aliases to the ES name (mutates the feature).
            if feature.metric in ['mean', 'average']:
                feature.metric = 'avg'
            # extended_stats is required for std_deviation/variance; the
            # cheaper stats aggregation covers the other metrics.
            if feature.metric in ['std_deviation', 'variance']:
                sub_agg = 'extended_stats'
            else:
                sub_agg = 'stats'

            if feature.script:
                agg = {
                    sub_agg: {
                        "script": {
                            "lang": "painless",
                            "inline": feature.script,
                        }
                    }
                }
            elif feature.field:
                agg = {
                    sub_agg: {
                        "field": feature.field,
                    }
                }

            # NOTE(review): if a feature has neither script nor field,
            # 'agg' is unbound (or stale from the previous iteration) --
            # confirm such features cannot reach this point.
            aggs[feature.name] = agg

        return aggs

    @classmethod
    def _build_times_query(
        cls,
        bucket_interval,
        features,
        from_ms,
        to_ms,
        timestamp_field,
    ):
        """
        Build Elasticsearch query for time-series
        """

        # size=0: only the aggregations are needed, not the hits themselves.
        body = {
            "size": 0,
            "aggs": {
                "histogram": {
                    "date_histogram": {
                        "field":
                        timestamp_field,
                        "extended_bounds":
                        _build_extended_bounds(from_ms,
                                               to_ms - 1000 * bucket_interval),
                        "interval":
                        "%ds" % bucket_interval,
                        "min_doc_count":
                        0,
                        "time_zone":
                        "UTC",
                        "format":
                        "yyyy-MM-dd'T'HH:mm:ss'Z'",  # key_as_string
                        "order": {
                            "_key": "asc"
                        }
                    },
                    "aggs": {},
                }
            }
        }

        must = []

        # Restrict the time window when bounds are given.
        date_range = _build_date_range(timestamp_field, from_ms, to_ms)
        if date_range is not None:
            must.append(date_range)

        # Per-feature match_all conditions are ANDed into the same bool query.
        for feature in features:
            match_all = _build_match_all(feature.match_all)
            for condition in match_all:
                must.append(condition)

        if len(must) > 0:
            body['query'] = {
                'bool': {
                    'must': must,
                }
            }

        aggs = cls._build_aggs(features)

        # Sorted so the generated query body is deterministic.
        for x in sorted(aggs):
            body['aggs']['histogram']['aggs'][x] = aggs[x]

        return body

    @staticmethod
    def _get_agg_val(bucket, feature):
        """
        Get aggregation value for the bucket returned by Elasticsearch
        """
        agg_val = bucket[feature.name].get(feature.metric)

        if agg_val is None:
            # NOTE(review): the caller assigns this into a float ndarray;
            # verify that a None value is accepted there.
            logging.info(
                "missing data: field '%s', metric: '%s', bucket: %s",
                feature.field,
                feature.metric,
                bucket['key'],
            )

        return agg_val

    def get_times_data(
        self,
        bucket_interval,
        features,
        from_date=None,
        to_date=None,
    ):
        """Yield (seconds since first bucket, per-feature values, time string)
        for each date_histogram bucket in the requested range."""
        from_ms, to_ms = _date_range_to_ms(from_date, to_date)

        body = self._build_times_query(
            bucket_interval,
            features,
            from_ms=from_ms,
            to_ms=to_ms,
            timestamp_field=self.timestamp_field,
        )

        es_res = self.search(
            body,
            routing=None,
        )

        hits = es_res['hits']['total']
        # NOTE(review): on Elasticsearch >= 7 'hits.total' is an object
        # ({'value': N, ...}), so this == 0 check may never match --
        # confirm against the client/server version in use.
        if hits == 0:
            return

        # TODO: last bucket may contain incomplete data when to_date == now
        """
        now = datetime.datetime.now().timestamp()
        epoch_ms = 1000 * int(now)
        min_bound_ms = 1000 * int(now / bucket_interval) * bucket_interval
        """

        t0 = None

        for bucket in es_res['aggregations']['histogram']['buckets']:
            # One row of feature values, initialized to NaN.
            X = np.full(len(features), np.nan, dtype=float)
            timestamp = int(bucket['key'])
            timeval = bucket['key_as_string']

            for i, feature in enumerate(features):
                X[i] = self._get_agg_val(bucket, feature)

            # TODO: last bucket may contain incomplete data when to_date == now
            """
            try:
                # The last interval contains partial data
                if timestamp == min_bound_ms:
                    R = float(epoch_ms - min_bound_ms
                       ) / (1000 * bucket_interval)
                    X = R * X + (1-R) * X_prev
            except NameError:
                # X_prev not defined. No interleaving required.
                pass

            X_prev = X
            """

            if t0 is None:
                t0 = timestamp

            # Timestamps are yielded relative to the first bucket, in seconds.
            yield (timestamp - t0) / 1000, X, timeval
Ejemplo n.º 19
0
from voluptuous import Schema, Required, All, Length, Range, Invalid, Coerce
from common.data_facade import DataFacade, DEFAULT_PAGE_SIZE

app = Flask('topsecret')
data_facade = DataFacade(app)


def _nonzero_text(msg):
    # Nonzero-length unicode value with a custom error message.
    return All(unicode, Length(min=1), msg=msg)


# Query-string validator for paginated GET requests.
validate_get_page = Schema({
    Required('page', default=1): All(
        Coerce(int), Range(min=1), msg='Page must be an integer >= 1'),
    Required('page_size', default=DEFAULT_PAGE_SIZE): All(
        Coerce(int),
        Range(min=1, max=1000),
        msg='Page size must be an integer >= 1 and <= 1000'),
    'body': _nonzero_text(
        "Body search must be a nonzero-length string if specified"),
    'sender': _nonzero_text(
        "Sender search must be a nonzero-length string if specified"),
    'recipient': _nonzero_text(
        "Recipient search must be a nonzero-length string if specified"),
    'sort': _nonzero_text(
        "Sort attribute must be a nonzero-length string if specified"),
})
Ejemplo n.º 20
0
    cfg.StrOpt(
        'region_name',
        default='RegionOne',
        help='Region Name',
        deprecated_group=DEPRECATED_COLLECTOR_GNOCCHI_OPTS,
    ),
]

# Both option sets are registered under the same Gnocchi collector group.
cfg.CONF.register_opts(keystone_opts, COLLECTOR_GNOCCHI_OPTS)
cfg.CONF.register_opts(collector_gnocchi_opts, COLLECTOR_GNOCCHI_OPTS)

# Module-level alias for the global configuration object.
CONF = cfg.CONF

# Extra per-metric arguments accepted by the Gnocchi collector.
GNOCCHI_EXTRA_SCHEMA = {
    Required('extra_args'): {
        Required('resource_type'):
        All(str, Length(min=1)),
        # Due to Gnocchi model, metric are grouped by resource.
        # This parameter permits to adapt the key of the resource identifier
        Required('resource_key', default='id'):
        All(str, Length(min=1)),
        Required('aggregation_method', default='max'):
        In(['max', 'mean', 'min']),
    },
}


class AssociatedResourceNotFound(Exception):
    """Exception raised when no resource can be associated with a metric."""

    def __init__(self, resource_key, resource_id):
        super(AssociatedResourceNotFound, self).__init__(
            'Resource with {}={} could not be found'.format(
Ejemplo n.º 21
0
        return member


GAMES = Games()

# Shape of one team member in a game-creation payload.
_new_member = {
    Required('name'): str,
    Required('level', default=1): All(int, Range(min=1)),
    Required('theme', default=''): str,
    Required('level_max', default=None): Any(None, All(int, Range(min=1))),
}

# A new game needs a team name, at least one member and a positive
# duration; its state always starts as 'ready'.
create_game_validator = Schema({
    Required('team'): str,
    Required('team_members'): All([_new_member], Length(min=1)),
    Required('state', default='ready'): 'ready',
    Required('duration'): All(int, Range(min=1)),
})


get_game_validator = Schema({
    Required('team'): str,
    Required('team_members'): All([
        {
            Required('name'): str,
            Required('level'): All(int, Range(min=1)),
            Required('theme'): str,
            Required('id'): str,
            Required('level_max'): All(int, Range(min=1)),
            Remove('levels_obj'): BaseLevel
Ejemplo n.º 22
0
# Option group and options for the Prometheus collector.
PROMETHEUS_COLLECTOR_OPTS = 'collector_prometheus'
pcollector_collector_opts = [
    cfg.StrOpt(
        'prometheus_url',
        default='',
        help='Prometheus service URL',
    ),
]
cfg.CONF.register_opts(pcollector_collector_opts, PROMETHEUS_COLLECTOR_OPTS)

# Module-level alias for the global configuration object.
CONF = cfg.CONF

# Extra per-metric arguments: only a non-empty PromQL query is required.
PROMETHEUS_EXTRA_SCHEMA = {
    Required('extra_args'): {
        Required('query'):
        All(str, Length(min=1)),
    }
}


class PrometheusConfigError(collect_exceptions.CollectError):
    """Collector error raised for Prometheus configuration problems."""
    pass


class PrometheusClient(object):
    @classmethod
    def build_query(cls, source, query, start, end, period, metric_name):
        """Build PromQL instant queries."""
        start = ck_utils.iso8601_from_timestamp(start)
        end = ck_utils.iso8601_from_timestamp(end)
Ejemplo n.º 23
0
    return camera


def check_for_hwaccels(hwaccel_args: List[str]) -> List[str]:
    """Return the explicit hwaccel args, or the VAAPI defaults when the
    environment reports VAAPI support and nothing was configured."""
    explicitly_configured = bool(hwaccel_args)
    if not explicitly_configured and os.getenv(ENV_VAAPI_SUPPORTED) == "true":
        return HWACCEL_VAAPI
    return hwaccel_args


STREAM_SCEHMA = Schema({
    Required("path"):
    All(str, Length(min=1)),
    Optional("width", default=None):
    Any(int, None),
    Optional("height", default=None):
    Any(int, None),
    Optional("fps", default=None):
    Any(All(int, Range(min=1)), None),
    Optional("input_args", default=CAMERA_INPUT_ARGS):
    list,
    Optional("hwaccel_args", default=CAMERA_HWACCEL_ARGS):
    check_for_hwaccels,
    Optional("codec", default=""):
    str,
    Optional("rtsp_transport", default="tcp"):
    Any("tcp", "udp", "udp_multicast", "http"),
    Optional("filter_args", default=[]):
Ejemplo n.º 24
0
from voluptuous import Schema, Required, Optional, All, Any, Length, Number, Extra

# Accept both bytes and text for all string-valued fields.
_string = Any(bytes, str)

_non_empty_string = All(_string, Length(min=1))

_parameter_value = _string

# Minimal parameter: just a name/value pair.
_parameter_basic = {
    Required('name'): _non_empty_string,
    Required('value'): _parameter_value,
}

_parameter_description = {
    Required('summary'): _non_empty_string,
    Optional('verbose'): _non_empty_string,
}

# The full parameter form extends the basic one with a description and
# optional UI metadata.
_parameter_full = dict(_parameter_basic)
_parameter_full.update({
    Required('description'): _parameter_description,
    Optional('type'): Any(_non_empty_string, [_non_empty_string]),
    Optional('visibility'): _non_empty_string,
    Optional('display'): _non_empty_string,
    Optional('options'): [(_non_empty_string, _non_empty_string)],
})

_citation = {
    Required('citation'): _string,
    Required('description'): _string,
    Required('bibtex'): _string,
Ejemplo n.º 25
0
# -*- coding: utf-8 -*-
import string
from voluptuous import Schema, Length, Any

from autumn.torn.form import Form
from .. import BaseHandler
from .. import require
from autumn.torn.paginator import Paginator

# Stock in/out form validator; extra=True keeps any additional request
# arguments the form carries.
stock_schema = Schema(
    {'num': Length(min=1),
     'remark': Length(min=1),
     'action': Any('in', 'out'),
     'sku': str,
     'skuId': str},
    extra=True)


class StockIn(BaseHandler):
    """Handlers for the stock-in page: GET renders the form, POST submits it."""

    @require('storage')
    def get(self):
        # Pre-fill the form from the request arguments and force the
        # action to 'in' so the template renders the stock-in variant.
        form = Form(self.request.arguments, stock_schema)
        form.action.value = 'in'

        self.render('real/stock.html', form=form, error='')

    @require('storage')
    def post(self):
        # NOTE(review): this handler appears truncated in this excerpt --
        # the form is built but never validated or persisted here.
        form = Form(self.request.arguments, stock_schema)

def list_item_length(v):
    """Voluptuous validator: ``v`` must be a list of strings, each at most
    MAX_ITEM_LENGTH characters long.

    Raises Invalid on the first offending item.  Returns the (unmodified)
    list: voluptuous replaces the validated value with whatever a custom
    validator returns, so the original implementation -- which fell off
    the end and returned None -- silently turned valid lists into None in
    the schema output.
    """
    if not isinstance(v, list):
        raise Invalid('Not a list: {}'.format(type(v)))
    for i in v:
        if not isinstance(i, (str, unicode)):
            raise Invalid(
                'list item <{}> -> {} not one of (str, unicode)'.format(
                    i, type(i)))
        if len(i) > MAX_ITEM_LENGTH:
            raise Invalid('length {} > {}'.format(len(i), MAX_ITEM_LENGTH))
    return v


# Validation rules for an alarm-definition payload.  Required() keys must
# be present; Marker() keys are optional.
alarm_definition_schema = {
    Required('name'):
    All(Any(str, unicode), Length(max=255)),
    Required('expression'):
    All(Any(str, unicode)),
    Marker('description'):
    All(Any(str, unicode), Length(max=255)),
    Marker('severity'):
    All(Upper, Any('LOW', 'MEDIUM', 'HIGH', 'CRITICAL')),
    Marker('match_by'):
    Any([unicode], [str]),
    Marker('ok_actions'):
    list_item_length,
    Marker('alarm_actions'):
    list_item_length,
    Marker('undetermined_actions'):
    list_item_length,
    Marker('actions_enabled'):
    bool,
}


def validate(msg, require_all=False):
    try:
        request_body_schema = Schema(alarm_definition_schema,
                                     required=require_all,
Ejemplo n.º 27
0
from flask_mail import Message

import api

mail = None

from api.common import check, validate, safe_fail
from api.common import WebException, InternalException
from voluptuous import Required, Length, Schema
from datetime import datetime

# Reset requests carry only the username; check() pairs the error message
# with its validator chain.
password_reset_request_schema = Schema({
    Required('username'): check(
        ("Usernames must be between 3 and 20 characters.",
         [str, Length(min=3, max=20)]),
    )
})

# The actual reset carries the emailed token plus the new password.
password_reset_schema = Schema({
    Required("token"): check(
        ("This does not look like a valid token.",
         [str, Length(max=100)])),
    Required('password'): check(
        ("Passwords must be between 3 and 20 characters.",
         [str, Length(min=3, max=20)])),
})


def reset_password(token_value, password, confirm_password):
    """
    Perform the password update operation.
Ejemplo n.º 28
0
# Register the collect options and declare the namespace under which
# collector backends are looked up.
CONF.register_opts(collect_opts, 'collect')

COLLECTORS_NAMESPACE = 'cloudkitty.collector.backends'


def MetricDict(value):
    """Voluptuous validator: accept only a dict with at least one key."""
    if isinstance(value, dict) and value:
        return value
    raise Invalid("Not a dict with at least one key")


CONF_BASE_SCHEMA = {Required('metrics'): MetricDict}

METRIC_BASE_SCHEMA = {
    # Display unit
    Required('unit'): All(str, Length(min=1)),
    # Factor for unit converion
    Required('factor', default=1):
        Any(int, float, Coerce(fractions.Fraction)),
    # Offset for unit conversion
    Required('offset', default=0):
        # [int, float, fractions.Fraction],
        Any(int, float, Coerce(fractions.Fraction)),
    # Name to be used in dataframes, and used for service creation in hashmap
    # module. Defaults to the name of the metric
    Optional('alt_name'): All(str, Length(min=1)),
    # This is what metrics are grouped by on collection.
    Required('groupby', default=list): [
        All(str, Length(min=1))
    ],
    # Available in HashMap
Ejemplo n.º 29
0
        "end": 4242
    }]
})

# Default settings for this node.
default_local_config = ConfigDict({
    # the externally accessible address of this server
    "hostname": "127.0.0.1",
    # the url of the web server
    "web_server": "http://127.0.0.1",
})

problem_schema = Schema({
    Required("author"): All(str, Length(min=1, max=32)),
    Required("score"): All(int, Range(min=0)),
    Required("name"): All(str, Length(min=1, max=32)),
    Required("description"): str,
    Required("category"): All(str, Length(min=1, max=32)),
    Required("hints"): list,
    Required("organization"): All(str, Length(min=1, max=32)),
    Required("event"): All(str, Length(min=1, max=32)),
    "unique_name": str,
    "static_flag": bool,
    "walkthrough": All(str, Length(min=1, max=512)),
    "version": All(str, Length(min=1, max=8)),
    "tags": list,
    "pkg_description": All(str, Length(min=1, max=256)),
    "pkg_name": All(str, Length(min=1, max=32)),
    "pkg_dependencies": list,
Ejemplo n.º 30
0
    groups = defaultdict(lambda: defaultdict(set))

    for p in sorted(permissions, key=itemgetter('type')):
        permission_set = groups[p['type']][p.get('value')]
        permission_set.add(p['permission'])

        if p['permission'] == 'rw':
            permission_set.update({'r', 'w'})

    # the 'all' permission type always has None as the value
    groups['all'] = groups['all'][None]

    return groups


# Bounds on service names, taken from the application's options.
service_name_length = Length(min=options.min_length_service_name,
                             max=options.max_length_service_name)


def validate_service_schema(v):
    """Require a 'location' entry for every non-external service.

    Returns the document unchanged when it is valid.
    """
    needs_location = v['service_type'] != 'external'
    if needs_location and 'location' not in v:
        raise Invalid('location is required')
    return v


class Service(SubResource):
    resource_type = 'service'
    parent_resource = Organisation
    parent_key = 'services'
    read_only_fields = ['created_by']
    view = views.services
    active_view = views.active_services