Example 1
def includeme(config):
    """Pre-parse certain settings for python_social_auth, then load it."""
    settings = config.get_settings()
    settings['login_providers'] = aslist(settings.get('login_providers', ''))
    settings['trusted_login_providers'] = aslist(
        settings.get('trusted_login_providers', ''))
    if not any(settings['login_providers']):
        log.warning('no login providers configured, double check '
                    'your ini file and add a few')
    for name in ('SOCIAL_AUTH_AUTHENTICATION_BACKENDS',
                 'SOCIAL_AUTH_USER_FIELDS',
                 'SOCIAL_AUTH_PROTECTED_USER_FIELDS',
                 'SOCIAL_AUTH_FIELDS_STORED_IN_SESSION'):
        settings[name] = aslist(settings.get(name, ''))
    for name in ('SOCIAL_AUTH_SAML_ORG_INFO',
                 'SOCIAL_AUTH_SAML_TECHNICAL_CONTACT',
                 'SOCIAL_AUTH_SAML_SUPPORT_CONTACT',
                 'SOCIAL_AUTH_SAML_ENABLED_IDPS'):
        val = settings.get(name, '')
        if val:
            settings[name] = json.loads(val)
    for k in settings.keys():
        if k.startswith("SOCIAL_AUTH_"):
            if k.endswith("_SCOPE"):
                settings[k] = aslist(settings.get(k, ''))
            if k.endswith("_EXTRA_ARGUMENTS"):
                val = settings.get(k, '')
                if val:
                    settings[k] = json.loads(val)
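
All of these snippets revolve around pyramid.settings.aslist, which turns the whitespace- and newline-separated values typical of a PasteDeploy .ini file into a Python list. A minimal illustration (the setting values here are made up):

from pyramid.settings import aslist

# aslist() splits a setting on newlines and, with the default
# flatten=True, also on whitespace within each line.
assert aslist('google\nfacebook saml') == ['google', 'facebook', 'saml']
# An empty or missing setting becomes an empty list.
assert aslist('') == []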
Example 2
    def __init__(self, config_uri, app_name,
                 zk_hosts=None, group=None, topic=None, title=None,
                 sample=False):
        self.config_uri = config_uri
        self.curr_proc = multiprocessing.current_process()
        if title is None and topic is not None:
            title = self.title.format(topic)
        self.curr_proc.name = title

        setup_logging(self.config_uri)
        app_settings = get_appsettings(self.config_uri, name=app_name)
        self.config = Configurator(settings=app_settings)
        settings = self.config.registry.settings
        self.curr_proc.authkey = settings['eyrie.authkey']


        self.msg_count = 0
        self.logger = logging.getLogger('rf.kafka')

        self.commit_interval = int(settings.get('kafka.commit_interval',
                                                self.commit_interval))
        self.commit_greenlet = None
        self.consume_greenlet = None
        self.throughput_greenlet = None
        self.fetch_count = int(settings.get('kafka.fetch_count',
                                            self.fetch_count))

        zk_hosts = settings.get('kafka.zk_hosts', zk_hosts)
        if zk_hosts is None:
            raise ConfigurationError('No ZooKeeper hosts provided')
        group = settings.get('kafka.group', group)
        if group is None:
            raise ConfigurationError('No consumer group provided to join')
        topic = settings.get('kafka.topic', topic)
        if topic is None:
            raise ConfigurationError('No topic provided to consume')
        consumers = settings.get('kafka.consumers', None)
        if consumers:
            consumers = aslist(consumers)

        self.consumer = ZKConsumer(
            zk_hosts,
            group,
            topic,
            zk_handler=SequentialGeventHandler(),
            logger=self.logger,
            buffer_size=int(settings.get('kafka.buffer_size',
                                         self.buffer_size)),
            max_buffer_size=int(settings.get('kafka.max_buffer_size',
                                             self.max_buffer_size)),
            auto_commit=False,
            nodes=consumers,
        )
        self.consumer.zk.add_listener(self.zk_session_watch)

        self.context = zmq.Context()
        self.channel = self.context.socket(zmq.PUB if sample else zmq.PUSH)
        self.channel.connect(self.output.endpoint)
        self.lastSample = time.time()
Example 3
def adjust_settings(settings):
    settings['login_providers'] = aslist(settings.get('login_providers', ''))
    settings['trusted_login_providers'] = aslist(settings.get('trusted_login_providers', ''))
    if not any(settings['login_providers']):
        log.warning('no login providers configured, double check '
                    'your ini file and add a few')
    for k, v in settings.items():
        if k.startswith("SOCIAL_AUTH_"):
            if k.endswith("_SCOPE"):
                settings[k] = aslist(v)
            elif isinstance(v, str) and v.lstrip().startswith('{'):
                settings[k] = json.loads(v)
    for name in ('SOCIAL_AUTH_AUTHENTICATION_BACKENDS',
                 'SOCIAL_AUTH_USER_FIELDS',
                 'SOCIAL_AUTH_PROTECTED_USER_FIELDS',
                 'SOCIAL_AUTH_FIELDS_STORED_IN_SESSION'):
        settings[name] = aslist(settings.get(name, ''))
Example 4
def adjust_settings(settings):
    settings['login_providers'] = aslist(settings.get('login_providers', ''))
    settings['trusted_login_providers'] = aslist(settings.get('trusted_login_providers', ''))
    if not any(settings['login_providers']):
        log.warning('no login providers configured, double check '
                    'your ini file and add a few')
    for k, v in settings.items():
        if k.startswith("SOCIAL_AUTH_"):
            if k.endswith("_SCOPE") or k.endswith("_FIELD_SELECTORS"):
                settings[k] = aslist(v)
            elif isinstance(v, str) and v.lstrip().startswith('{'):
                settings[k] = json.loads(v)
    for name in ('SOCIAL_AUTH_AUTHENTICATION_BACKENDS',
                 'SOCIAL_AUTH_USER_FIELDS',
                 'SOCIAL_AUTH_PROTECTED_USER_FIELDS',
                 'SOCIAL_AUTH_FIELDS_STORED_IN_SESSION'):
        settings[name] = aslist(settings.get(name, ''))
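
The pattern above sends list-like values through aslist and JSON-looking values (strings starting with '{') through json.loads. A short sketch of both branches with hypothetical setting values:

import json

from pyramid.settings import aslist

# Hypothetical ini values: one list-like, one a JSON object.
settings = {
    'SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE': 'email profile',
    'SOCIAL_AUTH_SAML_ENABLED_IDPS': '{"myidp": {"url": "https://idp"}}',
}
for k, v in settings.items():
    if k.endswith('_SCOPE'):
        settings[k] = aslist(v)
    elif isinstance(v, str) and v.lstrip().startswith('{'):
        settings[k] = json.loads(v)

assert settings['SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE'] == ['email', 'profile']
assert settings['SOCIAL_AUTH_SAML_ENABLED_IDPS'] == {'myidp': {'url': 'https://idp'}}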
Example 5
def includeme(config):
    origins = aslist(config.registry.settings.get('origins', ''))
    config.registry.websocket = WebSocketWSGIApplication(handler_cls=WebSocket)
    config.registry.websocket_origins = origins
    config.add_route('ws', 'ws')
    config.add_view(websocket, route_name='ws')
    config.add_view(bad_handshake, context=HandshakeError)
    config.scan(__name__)
Example 6
def includeme(config):
    settings = config.get_settings()
    for key, value in settings.items():
        if not key.startswith('static.'):
            continue
        path, url = aslist(value)
        config.add_static_view(name=url, path=path)
        view_config(route_name=key)(staticRedirectView)
        config.add_route(key, url)
Example 7
def setup_discussion_callbacks(registry):
    """Register all :py:class:`IDiscussionCreationCallback` implementations
    according to ``local.ini``"""
    settings = registry.settings
    class_names = aslist(settings.get('discussion_callbacks', ''))
    for class_name in class_names:
        cls = resolver.resolve(class_name)
        registry.registerUtility(
            cls(), IDiscussionCreationCallback, cls.__name__)
Example 8
def includeme(config):
    # ensure that bowab static assets will be searched out before deform's
    config.add_static_view('bowab_static', 'sixfeetup.bowab:static')
    config.override_asset(to_override='deform:static/',
                          override_with='sixfeetup.bowab:static/')
    config.add_subscriber(register_api, BeforeRender)
    config.add_directive('add_models', add_models)
    model_paths = aslist(config.registry.settings.setdefault('bowab.models',
                                                             set()))
    config.add_models(model_paths)
    config.add_request_method(get_db_session, 'db_session', reify=True)
Example 9
def websocket(request):
    # WebSockets can be opened across origins and send cookies. To prevent
    # scripts on other sites from using this socket, ensure that the Origin
    # header (if present) matches the request host URL or is whitelisted.
    origin = request.headers.get('Origin')
    allowed = aslist(request.registry.settings.get('origins', ''))
    if origin is not None:
        if origin != request.host_url and origin not in allowed:
            return HTTPForbidden()
    app = WebSocketWSGIApplication(handler_cls=WebSocket)
    return request.get_response(app)
Example 10
def includeme(config):
	"""
	For inclusion by Pyramid.
	"""
	mib_paths = []
	cfg = config.registry.settings

	# Add user-configured MIB paths
	if 'netprofile.devices.mib_paths' in cfg:
		for path in aslist(cfg['netprofile.devices.mib_paths']):
			if os.path.isdir(path):
				mib_paths.append(path)

	# Add path to bundled MIBs
	dist = pkg_resources.get_distribution('netprofile_devices')
	if dist:
		new_path = os.path.join(dist.location, 'netprofile_devices', 'mibs')
		if os.path.isdir(new_path):
			mib_paths.append(new_path)

	if len(mib_paths) > 0:
		cur_path = snimpy.mib.path()
		snimpy.mib.path(':'.join(mib_paths) + ':' + cur_path)
Example 11
def includeme(config):
    resolver = DottedNameResolver()
    settings = config.registry.settings
    names = aslist(settings.get('studies.export.plans') or '')
    settings['studies.export.plans'] = [resolver.resolve(n) for n in names]
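
pyramid.path.DottedNameResolver resolves the dotted names from the setting into the actual Python objects they name; for example:

import json

from pyramid.path import DottedNameResolver

resolver = DottedNameResolver()
# Both 'pkg.module.attr' and 'pkg.module:attr' notations are accepted.
assert resolver.resolve('json.dumps') is json.dumps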
Example 12
def _parse_ini_settings(reg, celery):
    settings = reg.settings
    cfg = make_config_dict(settings, 'celery.')
    newconf = {}

    if 'broker' in cfg:
        newconf['BROKER_URL'] = cfg['broker']
    if 'broker.heartbeat' in cfg:
        newconf['BROKER_HEARTBEAT'] = cfg['broker.heartbeat']
    if 'broker.heartbeat.checkrate' in cfg:
        newconf['BROKER_HEARTBEAT_CHECKRATE'] = cfg[
            'broker.heartbeat.checkrate']
    if 'broker.use_ssl' in cfg:
        newconf['BROKER_USE_SSL'] = cfg['broker.use_ssl']
    if 'broker.pool_limit' in cfg:
        newconf['BROKER_POOL_LIMIT'] = cfg['broker.pool_limit']
    if 'broker.connection.timeout' in cfg:
        newconf['BROKER_CONNECTION_TIMEOUT'] = cfg['broker.connection.timeout']
    if 'broker.connection.retry' in cfg:
        newconf['BROKER_CONNECTION_RETRY'] = cfg['broker.connection.retry']
    if 'broker.connection.max_retries' in cfg:
        newconf['BROKER_CONNECTION_MAX_RETRIES'] = cfg[
            'broker.connection.max_retries']
    if 'broker.login_method' in cfg:
        newconf['BROKER_LOGIN_METHOD'] = cfg['broker.login_method']

    if 'backend' in cfg:
        newconf['CELERY_RESULT_BACKEND'] = cfg['backend']
    if 'task_result_expires' in cfg:
        newconf['CELERY_TASK_RESULT_EXPIRES'] = cfg['task_result_expires']
    if 'task_publish_retry' in cfg:
        newconf['CELERY_TASK_PUBLISH_RETRY'] = cfg['task_publish_retry']
    if 'enable_utc' in cfg:
        newconf['CELERY_ENABLE_UTC'] = cfg['enable_utc']
    if 'timezone' in cfg:
        newconf['CELERY_TIMEZONE'] = cfg['timezone']
    if 'always_eager' in cfg:
        newconf['CELERY_ALWAYS_EAGER'] = cfg['always_eager']
    if 'eager_propagates_exceptions' in cfg:
        newconf['CELERY_EAGER_PROPAGATES_EXCEPTIONS'] = cfg[
            'eager_propagates_exceptions']
    if 'ignore_result' in cfg:
        newconf['CELERY_IGNORE_RESULT'] = cfg['ignore_result']
    if 'store_errors_even_if_ignored' in cfg:
        newconf['CELERY_STORE_ERRORS_EVEN_IF_IGNORED'] = cfg[
            'store_errors_even_if_ignored']
    if 'message_compression' in cfg:
        newconf['CELERY_MESSAGE_COMPRESSION'] = cfg['message_compression']
    if 'max_cached_results' in cfg:
        newconf['CELERY_MAX_CACHED_RESULTS'] = cfg['max_cached_results']
    if 'chord_propagates' in cfg:
        newconf['CELERY_CHORD_PROPAGATES'] = cfg['chord_propagates']
    if 'track_started' in cfg:
        newconf['CELERY_TRACK_STARTED'] = cfg['track_started']
    if 'default_rate_limit' in cfg:
        newconf['CELERY_DEFAULT_RATE_LIMIT'] = cfg['default_rate_limit']
    if 'disable_rate_limits' in cfg:
        newconf['CELERY_DISABLE_RATE_LIMITS'] = cfg['disable_rate_limits']
    if 'acks_late' in cfg:
        newconf['CELERY_ACKS_LATE'] = cfg['acks_late']

    if 'accept_content' in cfg:
        newconf['CELERY_ACCEPT_CONTENT'] = aslist(cfg['accept_content'])
    else:
        newconf['CELERY_ACCEPT_CONTENT'] = ('msgpack', )

    if 'task_serializer' in cfg:
        newconf['CELERY_TASK_SERIALIZER'] = cfg['task_serializer']
    else:
        newconf['CELERY_TASK_SERIALIZER'] = 'msgpack'
    if 'result_serializer' in cfg:
        newconf['CELERY_RESULT_SERIALIZER'] = cfg['result_serializer']
    else:
        newconf['CELERY_RESULT_SERIALIZER'] = 'msgpack'
    if 'event_serializer' in cfg:
        newconf['CELERY_EVENT_SERIALIZER'] = cfg['event_serializer']
    else:
        newconf['CELERY_EVENT_SERIALIZER'] = 'msgpack'

    if 'result_exchange' in cfg:
        newconf['CELERY_RESULT_EXCHANGE'] = cfg['result_exchange']
    if 'result_exchange_type' in cfg:
        newconf['CELERY_RESULT_EXCHANGE_TYPE'] = cfg['result_exchange_type']
    if 'result_persistent' in cfg:
        newconf['CELERY_RESULT_PERSISTENT'] = cfg['result_persistent']
    if 'routes' in cfg:
        newconf['CELERY_ROUTES'] = aslist(cfg['routes'])
    if 'worker_direct' in cfg:
        newconf['CELERY_WORKER_DIRECT'] = cfg['worker_direct']
    if 'create_missing_queues' in cfg:
        newconf['CELERY_CREATE_MISSING_QUEUES'] = cfg['create_missing_queues']
    if 'enable_remote_control' in cfg:
        newconf['CELERY_ENABLE_REMOTE_CONTROL'] = cfg['enable_remote_control']

    if 'send_task_error_emails' in cfg:
        newconf['CELERY_SEND_TASK_ERROR_EMAILS'] = cfg[
            'send_task_error_emails']

    # if 'admins' in cfg:
    #     FIXME: list of tuples
    if 'server_email' in cfg:
        newconf['SERVER_EMAIL'] = cfg['server_email']
    if 'email_host' in cfg:
        newconf['EMAIL_HOST'] = cfg['email_host']
    if 'email_host_user' in cfg:
        newconf['EMAIL_HOST_USER'] = cfg['email_host_user']
    if 'email_host_password' in cfg:
        newconf['EMAIL_HOST_PASSWORD'] = cfg['email_host_password']
    if 'email_port' in cfg:
        newconf['EMAIL_PORT'] = cfg['email_port']
    if 'email_use_ssl' in cfg:
        newconf['EMAIL_USE_SSL'] = cfg['email_use_ssl']
    if 'email_use_tls' in cfg:
        newconf['EMAIL_USE_TLS'] = cfg['email_use_tls']
    if 'email_timeout' in cfg:
        newconf['EMAIL_TIMEOUT'] = cfg['email_timeout']

    if 'send_events' in cfg:
        newconf['CELERY_SEND_EVENTS'] = cfg['send_events']
    if 'send_task_sent_event' in cfg:
        newconf['CELERY_SEND_TASK_SENT_EVENT'] = cfg['send_task_sent_event']
    if 'event_queue_ttl' in cfg:
        newconf['CELERY_EVENT_QUEUE_TTL'] = cfg['event_queue_ttl']
    if 'event_queue_expires' in cfg:
        newconf['CELERY_EVENT_QUEUE_EXPIRES'] = cfg['event_queue_expires']
    if 'redirect_stdouts' in cfg:
        newconf['CELERY_REDIRECT_STDOUTS'] = cfg['redirect_stdouts']
    if 'redirect_stdouts_level' in cfg:
        newconf['CELERY_REDIRECT_STDOUTS_LEVEL'] = cfg[
            'redirect_stdouts_level']

    if 'queue_ha_policy' in cfg:
        qhp = aslist(cfg['queue_ha_policy'])
        if len(qhp) == 1:
            qhp = qhp[0]
        newconf['CELERY_QUEUE_HA_POLICY'] = qhp

    if 'security_key' in cfg:
        newconf['CELERY_SECURITY_KEY'] = cfg['security_key']
    if 'security_certificate' in cfg:
        newconf['CELERY_SECURITY_CERTIFICATE'] = cfg['security_certificate']
    if 'security_cert_store' in cfg:
        newconf['CELERY_SECURITY_CERT_STORE'] = cfg['security_cert_store']

    if 'default_queue' in cfg:
        newconf['CELERY_DEFAULT_QUEUE'] = cfg['default_queue']
    if 'default_exchange' in cfg:
        newconf['CELERY_DEFAULT_EXCHANGE'] = cfg['default_exchange']
    if 'default_exchange_type' in cfg:
        newconf['CELERY_DEFAULT_EXCHANGE_TYPE'] = cfg['default_exchange_type']
    if 'default_routing_key' in cfg:
        newconf['CELERY_DEFAULT_ROUTING_KEY'] = cfg['default_routing_key']
    if 'default_delivery_mode' in cfg:
        newconf['CELERY_DEFAULT_DELIVERY_MODE'] = cfg['default_delivery_mode']

    if 'concurrency' in cfg:
        newconf['CELERYD_CONCURRENCY'] = cfg['concurrency']
    if 'prefetch_multiplier' in cfg:
        newconf['CELERYD_PREFETCH_MULTIPLIER'] = cfg['prefetch_multiplier']
    if 'force_execv' in cfg:
        newconf['CELERYD_FORCE_EXECV'] = cfg['force_execv']
    if 'worker_lost_wait' in cfg:
        newconf['CELERYD_WORKER_LOST_WAIT'] = cfg['worker_lost_wait']
    if 'max_tasks_per_child' in cfg:
        newconf['CELERYD_MAX_TASKS_PER_CHILD'] = cfg['max_tasks_per_child']
    if 'task_time_limit' in cfg:
        newconf['CELERYD_TASK_TIME_LIMIT'] = cfg['task_time_limit']
    if 'task_soft_time_limit' in cfg:
        newconf['CELERYD_TASK_SOFT_TIME_LIMIT'] = cfg['task_soft_time_limit']
    if 'state_db' in cfg:
        newconf['CELERYD_STATE_DB'] = cfg['state_db']
    if 'timer_precision' in cfg:
        newconf['CELERYD_TIMER_PRECISION'] = cfg['timer_precision']
    if 'hijack_root_logger' in cfg:
        newconf['CELERYD_HIJACK_ROOT_LOGGER'] = cfg['hijack_root_logger']
    if 'log_color' in cfg:
        newconf['CELERYD_LOG_COLOR'] = cfg['log_color']
    if 'log_format' in cfg:
        newconf['CELERYD_LOG_FORMAT'] = cfg['log_format']
    if 'task_log_format' in cfg:
        newconf['CELERYD_TASK_LOG_FORMAT'] = cfg['task_log_format']
    if 'pool' in cfg:
        newconf['CELERYD_POOL'] = cfg['pool']
    if 'pool_restarts' in cfg:
        newconf['CELERYD_POOL_RESTARTS'] = cfg['pool_restarts']
    if 'autoscaler' in cfg:
        newconf['CELERYD_AUTOSCALER'] = cfg['autoscaler']
    if 'autoreloader' in cfg:
        newconf['CELERYD_AUTORELOADER'] = cfg['autoreloader']
    if 'consumer' in cfg:
        newconf['CELERYD_CONSUMER'] = cfg['consumer']
    if 'timer' in cfg:
        newconf['CELERYD_TIMER'] = cfg['timer']

    if 'celerymon_log_format' in cfg:
        newconf['CELERYMON_LOG_FORMAT'] = cfg['celerymon_log_format']

    if 'broadcast_queue' in cfg:
        newconf['CELERY_BROADCAST_QUEUE'] = cfg['broadcast_queue']
    if 'broadcast_exchange' in cfg:
        newconf['CELERY_BROADCAST_EXCHANGE'] = cfg['broadcast_exchange']
    if 'broadcast_exchange_type' in cfg:
        newconf['CELERY_BROADCAST_EXCHANGE_TYPE'] = cfg[
            'broadcast_exchange_type']

    if 'scheduler' in cfg:
        newconf['CELERYBEAT_SCHEDULER'] = cfg['scheduler']
    if 'schedule_filename' in cfg:
        newconf['CELERYBEAT_SCHEDULE_FILENAME'] = cfg['schedule_filename']
    if 'sync_every' in cfg:
        newconf['CELERYBEAT_SYNC_EVERY'] = cfg['sync_every']
    if 'max_loop_interval' in cfg:
        newconf['CELERYBEAT_MAX_LOOP_INTERVAL'] = cfg['max_loop_interval']

    # FIXME: complex python values!
    opts = make_config_dict(cfg, 'schedule.')
    if len(opts) > 0:
        newconf['CELERYBEAT_SCHEDULE'] = as_dict(opts)

    if 'redis_max_connections' in cfg:
        newconf['CELERY_REDIS_MAX_CONNECTIONS'] = cfg['redis_max_connections']

    opts = make_config_dict(cfg, 'broker.transport_options.')
    if len(opts) > 0:
        newconf['BROKER_TRANSPORT_OPTIONS'] = opts

    opts = make_config_dict(cfg, 'task_publish_retry_policy.')
    if len(opts) > 0:
        newconf['CELERY_TASK_PUBLISH_RETRY_POLICY'] = opts

    opts = make_config_dict(cfg, 'result_tables.')
    if len(opts) > 0:
        newconf['CELERY_RESULT_DB_TABLENAMES'] = opts

    opts = make_config_dict(cfg, 'result_options.')
    if len(opts) > 0:
        newconf['CELERY_RESULT_ENGINE_OPTIONS'] = opts

    opts = make_config_dict(cfg, 'cache_options.')
    if len(opts) > 0:
        newconf['CELERY_CACHE_BACKEND_OPTIONS'] = opts

    opts = make_config_dict(cfg, 'mongodb_options.')
    if len(opts) > 0:
        newconf['CELERY_MONGODB_BACKEND_SETTINGS'] = opts

    cass = make_config_dict(cfg, 'cassandra.')
    if 'servers' in cass:
        newconf['CASSANDRA_SERVERS'] = aslist(cass['servers'])
    if 'keyspace' in cass:
        newconf['CASSANDRA_KEYSPACE'] = cass['keyspace']
    if 'column_family' in cass:
        newconf['CASSANDRA_COLUMN_FAMILY'] = cass['column_family']
    if 'read_consistency' in cass:
        newconf['CASSANDRA_READ_CONSISTENCY'] = cass['read_consistency']
    if 'write_consistency' in cass:
        newconf['CASSANDRA_WRITE_CONSISTENCY'] = cass['write_consistency']
    if 'detailed_mode' in cass:
        newconf['CASSANDRA_DETAILED_MODE'] = cass['detailed_mode']
    opts = make_config_dict(cass, 'options.')
    if len(opts) > 0:
        newconf['CASSANDRA_OPTIONS'] = opts

    opts = make_config_dict(cfg, 'queues.')
    if len(opts) > 0:
        newconf['CELERY_QUEUES'] = opts
    else:
        newconf['CELERY_QUEUES'] = _default_queues

    mm = reg.getUtility(IModuleManager)

    opts = []
    if 'imports' in cfg:
        opts = aslist(cfg['imports'])
    for imp in mm.get_task_imports():
        opts.append(imp)
    if len(opts) > 0:
        newconf['CELERY_IMPORTS'] = opts

    opts = []
    if 'include' in cfg:
        opts = aslist(cfg['include'])
    # FIXME: hook module include here (?)
    if len(opts) > 0:
        newconf['CELERY_INCLUDE'] = opts

    if len(newconf) > 0:
        if isinstance(celery, Celery):
            celery.config_from_object(newconf)
        else:
            celery.update(newconf)
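
make_config_dict is a project helper that is not shown here; from the calls above it evidently selects the settings sharing a prefix and strips that prefix from the keys. A hypothetical stand-in with that behavior:

def make_config_dict(settings, prefix):
    # Hypothetical helper: keep only the keys starting with `prefix`,
    # with the prefix removed from the resulting keys.
    return {key[len(prefix):]: value
            for key, value in settings.items()
            if key.startswith(prefix)}

# make_config_dict({'celery.broker': 'x', 'other': 'y'}, 'celery.')
# -> {'broker': 'x'}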
Example 13
    def __init__(self, config_uri, app_name,
                 zk_hosts=None, group=None, topic=None, title=None,
                 sample=False):
        self._terminate = False
        self.config_uri = config_uri
        self.curr_proc = multiprocessing.current_process()
        if title is None and topic is not None:
            title = self.title.format(topic)
        self.curr_proc.name = title

        setup_logging(self.config_uri)
        app_settings = get_appsettings(self.config_uri, name=app_name)
        self.config = Configurator(settings=app_settings)
        settings = self.config.registry.settings
        self.curr_proc.authkey = settings['eyrie.authkey']


        self.msg_count = 0
        self.logger = logging.getLogger('rf.kafka')

        self.commit_interval = int(settings.get('kafka.commit_interval',
                                                self.commit_interval))
        self.commit_greenlet = None
        self.consume_greenlet = None
        self.throughput_greenlet = None
        self.fetch_count = int(settings.get('kafka.fetch_count',
                                            self.fetch_count))

        zk_hosts = settings.get('kafka.zk_hosts', zk_hosts)
        if zk_hosts is None:
            raise ConfigurationError('No ZooKeeper hosts provided')
        group = settings.get('kafka.group', group)
        if group is None:
            raise ConfigurationError('No consumer group provided to join')
        topic = settings.get('kafka.topic', topic)
        if topic is None:
            raise ConfigurationError('No topic provided to consume')
        consumers = settings.get('kafka.consumers', None)
        if consumers:
            consumers = aslist(consumers)

        self.consumer = ZKConsumer(
            zk_hosts,
            group,
            topic,
            zk_handler=SequentialGeventHandler(),
            logger=self.logger,
            buffer_size=int(settings.get('kafka.buffer_size',
                                         self.buffer_size)),
            max_buffer_size=int(settings.get('kafka.max_buffer_size',
                                             self.max_buffer_size)),
            auto_commit=False,
            nodes=consumers,
            identifier='{}-{}'.format(socket.getfqdn(),
                                      title.replace('(', '').replace(')', '')),
        )
        self.consumer.zk.add_listener(self.zk_session_watch)

        self.context = zmq.Context()
        self.channel = self.context.socket(zmq.PUB if sample else zmq.PUSH)
        self.channel.connect(self.output.endpoint)
        self.lastSample = time.time()
Example 14
def configure(settings):
    """Return a Configurator for the Pyramid application."""
    # Settings from the config file are extended / overwritten by settings from
    # the environment.
    env_settings = {
        # The URL of the https://github.com/hypothesis/via instance to
        # integrate with.
        "via_url": env_setting("VIA_URL", required=True),
        "jwt_secret": env_setting("JWT_SECRET", required=True),
        "google_client_id": env_setting("GOOGLE_CLIENT_ID"),
        "google_developer_key": env_setting("GOOGLE_DEVELOPER_KEY"),
        "google_app_id": env_setting("GOOGLE_APP_ID"),
        "lms_secret": env_setting("LMS_SECRET"),
        "hashed_pw": env_setting("HASHED_PW"),
        "salt": env_setting("SALT"),
        "username": env_setting("USERNAME"),
        # We need to use a randomly generated 16 byte array to encrypt secrets.
        # For now we will use the first 16 bytes of the lms_secret
        "aes_secret": env_setting("LMS_SECRET", required=True),
        # The OAuth 2.0 client_id and client_secret for authenticating to the h API.
        "h_client_id": env_setting("H_CLIENT_ID", required=True),
        "h_client_secret": env_setting("H_CLIENT_SECRET", required=True),
        # The OAuth 2.0 client_id and client_secret for logging users in to h.
        "h_jwt_client_id": env_setting("H_JWT_CLIENT_ID", required=True),
        "h_jwt_client_secret": env_setting("H_JWT_CLIENT_SECRET",
                                           required=True),
        # The authority that we'll create h users and groups in (e.g. "lms.hypothes.is").
        "h_authority": env_setting("H_AUTHORITY", required=True),
        # The base URL of the h API (e.g. "https://hypothes.is/api").
        "h_api_url": env_setting("H_API_URL", required=True),
        # The postMessage origins from which to accept RPC requests.
        "rpc_allowed_origins": env_setting("RPC_ALLOWED_ORIGINS",
                                           required=True),
    }

    database_url = env_setting("DATABASE_URL")
    if database_url:
        env_settings["sqlalchemy.url"] = database_url

    env_settings["via_url"] = _append_trailing_slash(env_settings["via_url"])
    env_settings["h_api_url"] = _append_trailing_slash(
        env_settings["h_api_url"])

    try:
        env_settings["aes_secret"] = env_settings["aes_secret"].encode(
            "ascii")[0:16]
    except UnicodeEncodeError:
        raise SettingError("LMS_SECRET must contain only ASCII characters")

    env_settings["rpc_allowed_origins"] = aslist(
        env_settings["rpc_allowed_origins"])

    settings.update(env_settings)

    config = Configurator(settings=settings, root_factory=".resources.Root")

    # Security policies
    authn_policy = AuthTktAuthenticationPolicy(settings["lms_secret"],
                                               callback=groupfinder,
                                               hashalg="sha512")
    authz_policy = ACLAuthorizationPolicy()
    config.set_authentication_policy(authn_policy)
    config.set_authorization_policy(authz_policy)

    return config
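
env_setting is defined elsewhere in the application; judging from the calls above, it reads one environment variable and enforces the required flag. A hypothetical sketch of that contract:

import os


class SettingError(Exception):
    """Stand-in for the SettingError the example module imports."""


def env_setting(name, required=False):
    # Hypothetical helper: fetch one setting from the environment,
    # failing fast when a required variable is missing.
    value = os.environ.get(name)
    if required and value is None:
        raise SettingError('environment variable {} is required'.format(name))
    return value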
Example 15
def configure(settings):
    """Return a Configurator for the Pyramid application."""
    sg = SettingGetter(settings)  # pylint:disable=invalid-name

    env_settings = {
        # Whether or not we're in "dev" mode (as opposed to QA, production or tests).
        "dev":
        sg.get("DEV", default=False),
        # The URL of the https://github.com/hypothesis/via instance to
        # integrate with.
        "via_url":
        sg.get("VIA_URL"),
        "legacy_via_url":
        sg.get("LEGACY_VIA_URL"),
        "jwt_secret":
        sg.get("JWT_SECRET"),
        "google_client_id":
        sg.get("GOOGLE_CLIENT_ID"),
        "google_developer_key":
        sg.get("GOOGLE_DEVELOPER_KEY"),
        "google_app_id":
        sg.get("GOOGLE_APP_ID"),
        "lms_secret":
        sg.get("LMS_SECRET"),
        "hashed_pw":
        sg.get("HASHED_PW"),
        "salt":
        sg.get("SALT"),
        "username":
        sg.get("USERNAME"),
        # The secret string that's used to sign the session cookie.
        # This needs to be a 64-byte, securely generated random string.
        # For example you can generate one using Python 3 on the command line
        # like this:
        #     python3 -c 'import secrets; print(secrets.token_hex(nbytes=64))'
        "session_cookie_secret":
        sg.get("SESSION_COOKIE_SECRET"),
        # We need to use a randomly generated 16 byte array to encrypt secrets.
        # For now we will use the first 16 bytes of the lms_secret
        "aes_secret":
        sg.get("LMS_SECRET"),
        # The OAuth 2.0 client_id and client_secret for authenticating to the h API.
        "h_client_id":
        sg.get("H_CLIENT_ID"),
        "h_client_secret":
        sg.get("H_CLIENT_SECRET"),
        # The OAuth 2.0 client_id and client_secret for logging users in to h.
        "h_jwt_client_id":
        sg.get("H_JWT_CLIENT_ID"),
        "h_jwt_client_secret":
        sg.get("H_JWT_CLIENT_SECRET"),
        # The authority that we'll create h users and groups in (e.g. "lms.hypothes.is").
        "h_authority":
        sg.get("H_AUTHORITY"),
        # The public base URL of the h API (e.g. "https://hypothes.is/api").
        "h_api_url_public":
        sg.get("H_API_URL_PUBLIC"),
        # A private (within-VPC) URL for the same h API. Faster and more secure
        # than the public one. This is used for internal server-to-server
        # comms.
        "h_api_url_private":
        sg.get("H_API_URL_PRIVATE"),
        # The postMessage origins from which to accept RPC requests.
        "rpc_allowed_origins":
        sg.get("RPC_ALLOWED_ORIGINS"),
        # The secret string that's used to sign the feature flags cookie.
        # For example you can generate one using Python 3 on the command line
        # like this:
        #     python3 -c 'import secrets; print(secrets.token_hex())'
        "feature_flags_cookie_secret":
        sg.get("FEATURE_FLAGS_COOKIE_SECRET"),
        # The list of feature flags that are allowed to be set in the feature flags cookie.
        "feature_flags_allowed_in_cookie":
        sg.get("FEATURE_FLAGS_ALLOWED_IN_COOKIE"),
        # The secret string that's used to sign the OAuth 2 state param.
        # For example you can generate one using Python 3 on the command line
        # like this:
        #     python3 -c 'import secrets; print(secrets.token_hex())'
        "oauth2_state_secret":
        sg.get("OAUTH2_STATE_SECRET"),
    }

    env_settings["dev"] = asbool(env_settings["dev"])

    database_url = sg.get("DATABASE_URL")
    if database_url:
        env_settings["sqlalchemy.url"] = database_url

    env_settings["via_url"] = _append_trailing_slash(env_settings["via_url"])
    env_settings["legacy_via_url"] = _append_trailing_slash(
        env_settings["legacy_via_url"])
    env_settings["h_api_url_public"] = _append_trailing_slash(
        env_settings["h_api_url_public"])
    env_settings["h_api_url_private"] = _append_trailing_slash(
        env_settings["h_api_url_private"])

    if env_settings.get("aes_secret"):
        try:
            env_settings["aes_secret"] = env_settings["aes_secret"].encode(
                "ascii")[0:16]
        except UnicodeEncodeError:
            raise SettingError("LMS_SECRET must contain only ASCII characters")

    env_settings["rpc_allowed_origins"] = aslist(
        env_settings["rpc_allowed_origins"])

    settings.update(env_settings)

    config = Configurator(settings=settings,
                          root_factory="lms.resources.DefaultResource")

    # Security policies
    authz_policy = ACLAuthorizationPolicy()
    config.set_authorization_policy(authz_policy)

    return config
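
SettingGetter is likewise defined elsewhere; the code above only relies on sg.get(name, default=None) preferring the environment and falling back to a default. A hypothetical minimal version:

import os


class SettingGetter:
    """Hypothetical stand-in: environment first, then the ini settings."""

    def __init__(self, settings):
        self._settings = settings

    def get(self, name, default=None):
        if name in os.environ:
            return os.environ[name]
        return self._settings.get(name, default)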
Example 16
def includeme(config):
    settings = config.registry.settings
    settings['origins'] = aslist(settings.get('origins', ''))

    config.scan(__name__)