Example #1
0
class ConfigureInstanceCommand(InstanceCommand):
    """Configure instance.

    <instance>
      identifier of the instance to configure.
    """
    name = 'configure'
    actionverb = 'configured'

    # -p/--param takes a comma-separated key:value list ('named' type) of
    # settings to write into the instance configuration file.
    options = merge_options(
        InstanceCommand.options + (
            ('param', {'short': 'p',
                       'type': 'named',
                       'metavar': 'key1:value1,key2:value2',
                       'default': None,
                       'help': 'set <key> to <value> in configuration file.'}),
        ),
    )

    def configure_instance(self, appid):
        # Nothing to do unless at least one -p/--param pair was given.
        params = self.config.param
        if params is None:
            return
        appcfg = self.cwconfig
        for option, newvalue in params.items():
            try:
                appcfg.global_set_option(option, newvalue)
            except KeyError:
                raise ConfigurationError(
                    'unknown configuration key "%s" for mode %s'
                    % (option, appcfg.name))
        appcfg.save()
Example #2
0
class CubicWebPyramidConfiguration(BaseWebConfiguration, ServerConfiguration):
    """Pyramid application with a CubicWeb repository"""
    name = 'pyramid'

    # Appobject lookup paths are the union of both parent configurations'.
    cubicweb_appobject_path = (BaseWebConfiguration.cubicweb_appobject_path
                               | ServerConfiguration.cubicweb_appobject_path)
    cube_appobject_path = (BaseWebConfiguration.cube_appobject_path
                           | ServerConfiguration.cube_appobject_path)

    options = merge_options(ServerConfiguration.options +
                            BaseWebConfiguration.options)

    def init_log(self, *args, **kwargs):
        """Rely on logging configuration in Pyramid's .ini file, do nothing
        here.
        """

    def write_development_ini(self, cubes):
        """Write a 'development.ini' file into apphome."""
        source = path.join(path.dirname(__file__), 'development.ini.tmpl')
        target = path.join(self.apphome, 'development.ini')
        # One fresh random secret per secret-holding option.
        context = {'instance': self.appid, 'cubename': cubes[0]}
        for secret_option in ('session-secret',
                              'auth-authtkt-persistent-secret',
                              'auth-authtkt-session-secret'):
            context[secret_option] = get_random_secret_key()
        fill_templated_file(source, target, context)
 def test_merge1(self):
     """A duplicated option name is collapsed to a single entry whose
     settings come from the first occurrence."""
     first = ('dothis', {'type': 'yn', 'action': 'store', 'default': True, 'metavar': '<y or n>'})
     second = ('dothis', {'type': 'yn', 'action': 'store', 'default': False, 'metavar': '<y or n>'})
     merged = merge_options([first, second])
     self.assertEqual(len(merged), 1)
     optname, optdict = merged[0]
     self.assertEqual(optname, 'dothis')
     self.assertEqual(optdict['default'], True)
 def test_merge1(self):
     # NOTE(review): this duplicates an earlier test_merge1 definition in the
     # same class; the later definition shadows the earlier one.
     """Duplicated option names merge into one entry keeping the first
     occurrence's settings."""
     duplicates = [
         ('dothis', {'type': 'yn', 'action': 'store', 'default': True, 'metavar': '<y or n>'}),
         ('dothis', {'type': 'yn', 'action': 'store', 'default': False, 'metavar': '<y or n>'}),
     ]
     merged = merge_options(duplicates)
     self.assertEqual(len(merged), 1)
     self.assertEqual(merged[0][0], 'dothis')
     self.assertEqual(merged[0][1]['default'], True)
 def test_merge2(self):
     """Distinct options are kept while the duplicated one is still merged
     down to its first occurrence's settings."""
     yn_true = ('dothis', {'type': 'yn', 'action': 'store', 'default': True, 'metavar': '<y or n>'})
     stringopt = ('value', {'type': 'string', 'metavar': '<string>', 'short': 'v'})
     yn_false = ('dothis', {'type': 'yn', 'action': 'store', 'default': False, 'metavar': '<y or n>'})
     merged = merge_options([yn_true, stringopt, yn_false])
     self.assertEqual(len(merged), 2)
     self.assertEqual(merged[0][0], 'value')
     self.assertEqual(merged[1][0], 'dothis')
     self.assertEqual(merged[1][1]['default'], True)
 def test_merge2(self):
     # NOTE(review): this duplicates an earlier test_merge2 definition in the
     # same class; the later definition shadows the earlier one.
     """Merging a mixed option list keeps distinct names and collapses the
     duplicated 'dothis' entry."""
     source = [
         ('dothis', {'type': 'yn', 'action': 'store', 'default': True, 'metavar': '<y or n>'}),
         ('value', {'type': 'string', 'metavar': '<string>', 'short': 'v'}),
         ('dothis', {'type': 'yn', 'action': 'store', 'default': False, 'metavar': '<y or n>'}),
     ]
     merged = merge_options(source)
     self.assertEqual(len(merged), 2)
     names = [entry[0] for entry in merged]
     self.assertEqual(names, ['value', 'dothis'])
     self.assertEqual(merged[1][1]['default'], True)
 def test_merge1(self):
     """Two entries sharing the name 'dothis' merge to a single one carrying
     the first entry's default."""
     option = "dothis"
     variants = [
         (option, {"type": "yn", "action": "store", "default": True, "metavar": "<y or n>"}),
         (option, {"type": "yn", "action": "store", "default": False, "metavar": "<y or n>"}),
     ]
     merged = merge_options(variants)
     self.assertEqual(len(merged), 1)
     self.assertEqual(merged[0][0], option)
     self.assertEqual(merged[0][1]["default"], True)
 def test_merge2(self):
     """Merging keeps the unrelated 'value' option and collapses the two
     'dothis' entries to the first one's settings."""
     dothis_a = ("dothis", {"type": "yn", "action": "store", "default": True, "metavar": "<y or n>"})
     value = ("value", {"type": "string", "metavar": "<string>", "short": "v"})
     dothis_b = ("dothis", {"type": "yn", "action": "store", "default": False, "metavar": "<y or n>"})
     merged = merge_options([dothis_a, value, dothis_b])
     self.assertEqual(len(merged), 2)
     self.assertEqual(merged[0][0], "value")
     self.assertEqual(merged[1][0], "dothis")
     self.assertEqual(merged[1][1]["default"], True)
Example #9
0
class AllInOneConfiguration(WebConfigurationBase, ServerConfiguration):
    """repository and web instance in the same Pyramid process"""
    name = 'all-in-one'

    # Prepend a web-group 'profile' option to the options inherited from both
    # parent configurations.
    options = merge_options(
        (
            ('profile',
             {'type': 'string',
              'default': None,
              'help': 'profile code and use the specified file to store stats if this option is set',
              'group': 'web',
              'level': 3}),
        )
        + WebConfigurationBase.options
        + ServerConfiguration.options)

    # Appobject lookup paths are the union of both parents' paths.
    cubicweb_appobject_path = (WebConfigurationBase.cubicweb_appobject_path
                               | ServerConfiguration.cubicweb_appobject_path)
    cube_appobject_path = (WebConfigurationBase.cube_appobject_path
                           | ServerConfiguration.cube_appobject_path)
Example #10
0
class WebConfigurationBase(WebConfiguration):
    """web instance (in a web server) client of a RQL server"""

    # ctl configuration options, prepended to the inherited web options
    options = merge_options((
        ('port',
         {'type': 'int', 'default': None,
          'help': 'http server port number (default to 8080)',
          'group': 'web', 'level': 0}),
        ('interface',
         {'type': 'string', 'default': '0.0.0.0',
          'help': 'http server address on which to listen (default to everywhere)',
          'group': 'web', 'level': 1}),
        ('max-post-length',  # XXX specific to "wsgi" server
         {'type': 'bytes', 'default': '100MB',
          'help': 'maximum length of HTTP request. Default to 100 MB.',
          'group': 'web', 'level': 1}),
        ('pid-file',
         {'type': 'string', 'default': Method('default_pid_file'),
          'help': 'repository\'s pid file',
          'group': 'main', 'level': 2}),
    ) + WebConfiguration.options)

    def default_base_url(self):
        """Return the instance base URL, falling back to the lowercased fqdn
        and port 8080 when host/port are not configured."""
        from socket import getfqdn
        host = self['host'] or getfqdn().lower()
        port = self['port'] or 8080
        return 'http://%s:%s/' % (host, port)
Example #11
0
class ShellCommand(Command):
    """Run an interactive migration shell on an instance. This is a python shell
    with enhanced migration commands predefined in the namespace. An additional
    argument may be given corresponding to a file containing commands to execute
    in batch mode.

    By default it will connect to a local instance using an in memory
    connection, unless a URL to a running instance is specified.

    Arguments after bare "--" string will not be processed by the shell command
    You can use it to pass extra arguments to your script and expect for
    them in '__args__' afterwards.

    <instance>
      the identifier of the instance to connect.
    """
    name = 'shell'
    arguments = '<instance> [batch command file(s)] [-- <script arguments>]'
    min_args = 1
    max_args = None
    # local-group options controlling which sources the migration connects to
    # and whether up-to-date checking is skipped
    options = merge_options((
        ('system-only',
         {'short': 'S', 'action': 'store_true',
          'help': 'only connect to the system source when the instance is '
          'using multiple sources. You can\'t use this option and the '
          '--ext-sources option at the same time.',
          'group': 'local'
          }),

        ('ext-sources',
         {'short': 'E', 'type': 'csv', 'metavar': '<sources>',
          'help': "For multisources instances, specify to which sources the \
repository should connect to for upgrading. When unspecified or 'all' given, \
will connect to all defined sources. If 'migration' is given, appropriate \
sources for migration will be automatically selected.",
          'group': 'local'
          }),

        ('force',
         {'short': 'f', 'action': 'store_true',
          'help': 'don\'t check instance is up to date.',
          'group': 'local'
          }),

    ) + InstanceCommand.options)

    def _get_mih(self, appid):
        """ returns migration context handler & shutdown function """
        config = cwcfg.config_for(appid)
        # -E/--ext-sources and -S/--system-only are mutually exclusive;
        # default to connecting to all sources
        if self.config.ext_sources:
            assert not self.config.system_only
            sources = self.config.ext_sources
        elif self.config.system_only:
            sources = ('system',)
        else:
            sources = ('all',)
        config.set_sources_mode(sources)
        # -f/--force maps to the config's 'repairing' flag
        config.repairing = self.config.force
        mih = config.migration_handler()
        return mih, lambda: mih.shutdown()

    def run(self, args):
        # first positional argument is the instance appid / URL
        appuri = args.pop(0)
        mih, shutdown_callback = self._get_mih(appuri)
        try:
            # run with both security flags disabled for the migration session
            with mih.cnx:
                with mih.cnx.security_enabled(False, False):
                    if args:
                        # use cmdline parser to access left/right attributes only
                        # remember that usage requires instance appid as first argument
                        scripts, args = self.cmdline_parser.largs[1:], self.cmdline_parser.rargs
                        # batch mode: run each script, committing after each
                        for script in scripts:
                            mih.cmd_process_script(script, scriptargs=args)
                            mih.commit()
                    else:
                        # no script given: drop into the interactive shell
                        mih.interactive_shell()
        finally:
            # always release the migration handler's resources
            shutdown_callback()
Example #12
0
class LDAPFeedSource(datafeed.DataFeedSource):
    """LDAP feed source: unlike ldapuser source, this source is copy based and
    will import ldap content (beside passwords for authentication) into the
    system source.
    """
    # presumably False means imported CWUser entities are not editable from
    # within cubicweb -- TODO confirm against the datafeed source contract
    support_entities = {'CWUser': False}
    use_cwuri_as_url = False

    # raw ldap-source option definitions; merged with the generic datafeed
    # options just below
    options = (
        ('auth-mode', {
            'type': 'choice',
            'default': 'simple',
            'choices': ('simple', 'digest_md5', 'gssapi'),
            'help':
            'authentication mode used to authenticate user to the ldap.',
            'group': 'ldap-source',
            'level': 3,
        }),
        ('auth-realm', {
            'type': 'string',
            'default': None,
            'help': 'realm to use when using gssapi/kerberos authentication.',
            'group': 'ldap-source',
            'level': 3,
        }),
        ('data-cnx-dn', {
            'type': 'string',
            'default': '',
            'help':
            'user dn to use to open data connection to the ldap (eg used \
to respond to rql queries). Leave empty for anonymous bind',
            'group': 'ldap-source',
            'level': 1,
        }),
        ('data-cnx-password', {
            'type': 'string',
            'default': '',
            'help':
            'password to use to open data connection to the ldap (eg used to respond to rql queries). Leave empty for anonymous bind.',
            'group': 'ldap-source',
            'level': 1,
        }),
        ('user-base-dn', {
            'type': 'string',
            'default': '',
            'help':
            'base DN to lookup for users; disable user importation mechanism if unset',
            'group': 'ldap-source',
            'level': 1,
        }),
        ('user-scope', {
            'type': 'choice',
            'default': 'ONELEVEL',
            'choices': ('BASE', 'ONELEVEL', 'SUBTREE'),
            'help':
            'user search scope (valid values: "BASE", "ONELEVEL", "SUBTREE")',
            'group': 'ldap-source',
            'level': 1,
        }),
        ('user-classes', {
            'type': 'csv',
            'default': ('top', 'posixAccount'),
            'help':
            'classes of user (with Active Directory, you want to say "user" here)',
            'group': 'ldap-source',
            'level': 1,
        }),
        ('user-filter', {
            'type': 'string',
            'default': '',
            'help':
            'additional filters to be set in the ldap query to find valid users',
            'group': 'ldap-source',
            'level': 2,
        }),
        ('user-login-attr', {
            'type': 'string',
            'default': 'uid',
            'help':
            'attribute used as login on authentication (with Active Directory, you want to use "sAMAccountName" here)',
            'group': 'ldap-source',
            'level': 1,
        }),
        ('user-default-group', {
            'type': 'csv',
            'default': ('users', ),
            'help': 'name of a group in which ldap users will be by default. \
You can set multiple groups by separating them by a comma.',
            'group': 'ldap-source',
            'level': 1,
        }),
        ('user-attrs-map', {
            'type': 'named',
            'default': {
                'uid': 'login'
            },
            'help':
            'map from ldap user attributes to cubicweb attributes (with Active Directory, you want to use sAMAccountName:login,mail:email,givenName:firstname,sn:surname)',
            'group': 'ldap-source',
            'level': 1,
        }),
        ('group-base-dn', {
            'type': 'string',
            'default': '',
            'help':
            'base DN to lookup for groups; disable group importation mechanism if unset',
            'group': 'ldap-source',
            'level': 1,
        }),
        ('group-scope', {
            'type': 'choice',
            'default': 'ONELEVEL',
            'choices': ('BASE', 'ONELEVEL', 'SUBTREE'),
            'help':
            'group search scope (valid values: "BASE", "ONELEVEL", "SUBTREE")',
            'group': 'ldap-source',
            'level': 1,
        }),
        ('group-classes', {
            'type': 'csv',
            'default': ('top', 'posixGroup'),
            'help': 'classes of group',
            'group': 'ldap-source',
            'level': 1,
        }),
        ('group-filter', {
            'type': 'string',
            'default': '',
            'help':
            'additional filters to be set in the ldap query to find valid groups',
            'group': 'ldap-source',
            'level': 2,
        }),
        ('group-attrs-map', {
            'type': 'named',
            'default': {
                'cn': 'name',
                'memberUid': 'member'
            },
            'help': 'map from ldap group attributes to cubicweb attributes',
            'group': 'ldap-source',
            'level': 1,
        }),
    )

    # final options: the generic datafeed options plus the ones above, all
    # grouped under 'ldap-source'
    options = merge_options(
        datafeed.DataFeedSource.options + options,
        optgroup='ldap-source',
    )

    # ldap connection, opened lazily by _search() via _connect()
    _conn = None

    def check_urls(self, source_entity):
        """Validate the source's url setting: exactly one url, of the form
        <protocol>://<hostport> with a supported protocol."""
        urls = super(LDAPFeedSource, self).check_urls(source_entity)

        def invalid(msg):
            return ValidationError(source_entity.eid, {'url': _(msg)})

        if len(urls) > 1:
            raise invalid('can only have one url')
        parts = urls[0].split('://')
        if len(parts) != 2:
            raise invalid('badly formatted url')
        protocol = parts[0]
        if protocol not in PROTO_PORT:
            raise invalid('unsupported protocol')
        return urls

    def init(self, source_entity):
        """Initialize connection and search settings from the source's typed
        configuration (see the `options` definitions on this class)."""
        super(LDAPFeedSource, self).init(source_entity)
        typedconfig = self.config
        self.authmode = typedconfig['auth-mode']
        # resolve to one of _auth_simple / _auth_digest_md5 / _auth_gssapi
        self._authenticate = getattr(self, '_auth_%s' % self.authmode)
        self.cnx_dn = typedconfig['data-cnx-dn']
        self.cnx_pwd = typedconfig['data-cnx-password']
        self.user_base_dn = str(typedconfig['user-base-dn'])
        # NOTE(review): user scope is resolved through globals() while group
        # scope below uses LDAP_SCOPES -- presumably equivalent lookups; worth
        # unifying once confirmed.
        self.user_base_scope = globals()[typedconfig['user-scope']]
        self.user_login_attr = typedconfig['user-login-attr']
        self.user_default_groups = typedconfig['user-default-group']
        # ldap attribute -> cubicweb attribute maps, always containing the
        # builtin dn/modifyTimestamp entries plus the configured mapping
        self.user_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'}
        self.user_attrs.update(typedconfig['user-attrs-map'])
        self.user_rev_attrs = dict((v, k) for k, v in self.user_attrs.items())
        self.base_filters = [
            '(objectclass=%s)' % replace_filter(o)
            for o in typedconfig['user-classes']
        ]
        if typedconfig['user-filter']:
            self.base_filters.append(typedconfig['user-filter'])
        self.group_base_dn = str(typedconfig['group-base-dn'])
        self.group_base_scope = LDAP_SCOPES[typedconfig['group-scope']]
        # fixed: previously group_attrs was first assigned the raw
        # group-attrs-map and then immediately overwritten by the literal
        # below; the dead first assignment is removed.
        self.group_attrs = {
            'dn': 'eid',
            'modifyTimestamp': 'modification_date'
        }
        self.group_attrs.update(typedconfig['group-attrs-map'])
        self.group_rev_attrs = dict(
            (v, k) for k, v in self.group_attrs.items())
        self.group_base_filters = [
            '(objectClass=%s)' % replace_filter(o)
            for o in typedconfig['group-classes']
        ]
        if typedconfig['group-filter']:
            self.group_base_filters.append(typedconfig['group-filter'])
        # drop any previously opened ldap connection; reopened lazily
        self._conn = None

    def connection_info(self):
        """Split the single configured url into a (protocol, host, port)
        triple, defaulting the port from PROTO_PORT when absent."""
        assert len(self.urls) == 1, self.urls
        protocol, hostport = self.urls[0].split('://')
        if protocol == 'ldapi' or ':' not in hostport:
            host, port = hostport, PROTO_PORT[protocol]
        else:
            host, port = hostport.rsplit(':', 1)
        return protocol, host, port

    def authenticate(self, cnx, login, password=None, **kwargs):
        """return CWUser eid for the given login/password if this account is
        defined in this source, else raise `AuthenticationError`

        two queries are needed since passwords are stored crypted, so we have
        to fetch the salt first
        """
        self.info('ldap authenticate %s', login)
        if not password:
            # On Windows + ADAM this would have succeeded (!!!)
            # You get Authenticated as: 'NT AUTHORITY\ANONYMOUS LOGON'.
            # we really really don't want that
            raise AuthenticationError()
        # build an '&' ldap filter combining the login attribute with the
        # configured user classes/filters
        searchfilter = [
            '(%s=%s)' %
            (replace_filter(self.user_login_attr), replace_filter(login))
        ]
        searchfilter.extend(self.base_filters)
        searchstr = '(&%s)' % ''.join(searchfilter)
        # first search the user
        try:
            user = self._search(cnx, self.user_base_dn, self.user_base_scope,
                                searchstr)[0]
        except IndexError:
            # no such user
            raise AuthenticationError()
        # check password by establishing a (unused) connection
        try:
            self._connect(user, password)
        except ldap3.LDAPException as ex:
            # Something went wrong, most likely bad credentials
            self.info('while trying to authenticate %s: %s', user, ex)
            raise AuthenticationError()
        except Exception:
            self.error('while trying to authenticate %s', user, exc_info=True)
            raise AuthenticationError()
        # finally check the user is known to cubicweb and still attached to
        # this very source
        rset = cnx.execute(
            'Any X,SN WHERE X cwuri %(extid)s, X is CWUser, '
            'X cw_source S, S name SN', {'extid': user['dn']})
        if not rset or rset[0][1] != self.uri:
            # user is not known or has been moved away from this source
            raise AuthenticationError()
        return rset[0][0]

    def _connect(self, user=None, userpwd=None):
        """Open and bind an ldap connection.

        Without *user*, bind the data connection: anonymously unless
        data-cnx-dn is configured. With *user*, only validate the given
        credentials, raising `AuthenticationError` on failure.
        """
        protocol, host, port = self.connection_info()
        self.info('connecting %s://%s:%s as %s', protocol, host, port,
                  user and user['dn'] or 'anonymous')
        server = ldap3.Server(host, port=int(port))
        conn = ldap3.Connection(
            server,
            user=user and user['dn'],
            client_strategy=ldap3.STRATEGY_SYNC_RESTARTABLE,
            auto_referrals=False)
        # Now bind with the credentials given. Let exceptions propagate out.
        if user is None:
            # XXX always use simple bind for data connection
            if not self.cnx_dn:
                conn.bind()
            else:
                self._authenticate(conn, {'dn': self.cnx_dn}, self.cnx_pwd)
        else:
            # user specified, we want to check user/password, no need to return
            # the connection which will be thrown out
            if not self._authenticate(conn, user, userpwd):
                raise AuthenticationError()
        return conn

    def _auth_simple(self, conn, user, userpwd):
        """Simple bind with the user's dn and password; returns the bind()
        result."""
        conn.authentication = ldap3.AUTH_SIMPLE
        conn.user = user['dn']
        conn.password = userpwd
        return conn.bind()

    def _auth_digest_md5(self, conn, user, userpwd):
        """SASL DIGEST-MD5 bind; returns the bind() result."""
        conn.authentication = ldap3.AUTH_SASL
        conn.sasl_mechanism = 'DIGEST-MD5'
        # realm, user, password, authz-id
        conn.sasl_credentials = (None, user['dn'], userpwd, None)
        return conn.bind()

    def _auth_gssapi(self, conn, user, userpwd):
        """SASL GSSAPI bind; note *user* and *userpwd* are ignored here,
        credentials are taken from the environment."""
        conn.authentication = ldap3.AUTH_SASL
        conn.sasl_mechanism = 'GSSAPI'
        return conn.bind()

    def _search(self, cnx, base, scope, searchstr='(objectClass=*)', attrs=()):
        """Run an ldap query and return the matching entries as dicts built by
        _process_ldap_item (empty list when the search yields nothing)."""
        self.debug('ldap search %s %s %s %s %s', self.uri, base, scope,
                   searchstr, list(attrs))
        # open the shared ldap connection lazily on first use
        if self._conn is None:
            self._conn = self._connect()
        ldapcnx = self._conn
        ok = ldapcnx.search(base, searchstr, search_scope=scope,
                            attributes=attrs)
        if not ok:
            return []
        result = [
            self._process_ldap_item(rec['dn'], rec['attributes'].items())
            for rec in ldapcnx.response
            if rec['type'] == 'searchResEntry'
        ]
        self.debug('ldap built results %s', len(result))
        return result

    def _process_ldap_item(self, dn, iterator):
        """Turn an ldap received item into a proper dict."""
        itemdict = {'dn': dn}
        for key, value in iterator:
            mapped = self.user_attrs.get(key)  # XXx better password detection
            if mapped == 'upassword':
                pwd = value[0].encode('utf-8')
                # we only support ldap_salted_sha1 for ldap sources, see: server/utils.py
                if not pwd.startswith(b'{SSHA}'):
                    pwd = utils.crypt_password(pwd)
                itemdict[key] = Binary(pwd)
            elif mapped == 'modification_date':
                itemdict[key] = datetime.strptime(value[0], '%Y%m%d%H%M%SZ')
            elif len(value) == 1:
                # unwrap single-valued attributes
                itemdict[key] = value[0]
            else:
                itemdict[key] = value
        # we expect memberUid to be a list of user ids, make sure of it
        member = self.group_rev_attrs['member']
        if isinstance(itemdict.get(member), str):
            itemdict[member] = [itemdict[member]]
        return itemdict

    def _process_no_such_object(self, cnx, dn):
        """Some search return NO_SUCH_OBJECT error, handle this (usually because
Example #13
0
class BaseWebConfiguration(CubicWebConfiguration):
    """Base class for web configurations"""

    # web views (resp. cube 'views') are looked up in addition to the paths
    # inherited from CubicWebConfiguration
    cubicweb_appobject_path = (CubicWebConfiguration.cubicweb_appobject_path
                               | {'web.views'})
    cube_appobject_path = (CubicWebConfiguration.cube_appobject_path
                           | {'views'})

    options = merge_options(CubicWebConfiguration.options + (
        ('repository-uri',
         {'type': 'string', 'default': 'inmemory://',
          'help': 'see `cubicweb.dbapi.connect` documentation for possible value',
          'group': 'web', 'level': 2}),
        ('use-uicache',
         {'type': 'yn', 'default': True,
          'help': _('should css be compiled and store in uicache'),
          'group': 'ui', 'level': 2}),
        ('anonymous-user',
         {'type': 'string', 'default': None,
          'help': ('login of the CubicWeb user account to use for anonymous '
                   'user (if you want to allow anonymous)'),
          'group': 'web', 'level': 1}),
        ('anonymous-password',
         {'type': 'string', 'default': None,
          'help': ('password of the CubicWeb user account to use for '
                   'anonymous user, if anonymous-user is set'),
          'group': 'web', 'level': 1}),
        ('query-log-file',
         {'type': 'string', 'default': None,
          'help': 'web instance query log file',
          'group': 'web', 'level': 3}),
        ('cleanup-anonymous-session-time',
         {'type': 'time', 'default': '5min',
          'help': ('Same as cleanup-session-time but specific to anonymous '
                   'sessions. You can have a much smaller timeout here since '
                   'it will be transparent to the user. Default to 5min.'),
          'group': 'web', 'level': 3}),
    ))

    def anonymous_user(self):
        """return a login and password to use for anonymous users.

        None may be returned for both if anonymous connection is not
        allowed or if an empty login is used in configuration
        """
        try:
            login = self['anonymous-user'] or None
            password = self['anonymous-password']
        except KeyError:
            return None, None
        except UnicodeDecodeError:
            raise ConfigurationError(
                "anonymous information should only contains ascii")
        return login, password
Example #14
0
class WebConfiguration(BaseWebConfiguration):
    """the WebConfiguration is a singleton object handling instance's
    configuration and preferences
    """
    # web-specific options, merged with those of the base web configuration
    options = merge_options(BaseWebConfiguration.options + (
        # web configuration
        ('datadir-url', {
            'type':
            'string',
            'default':
            None,
            'help':
            ('base url for static data, if different from "${base-url}/data/".  '
             'If served from a different domain, that domain should allow '
             'cross-origin requests.'),
            'group':
            'web',
        }),
        ('auth-mode', {
            'type': 'choice',
            'choices': ('cookie', 'http'),
            'default': 'cookie',
            'help': 'authentication mode (cookie / http)',
            'group': 'web',
            'level': 3,
        }),
        ('realm', {
            'type': 'string',
            'default': 'cubicweb',
            'help': 'realm to use on HTTP authentication mode',
            'group': 'web',
            'level': 3,
        }),
        ('http-session-time', {
            'type':
            'time',
            'default':
            0,
            'help':
            "duration of the cookie used to store session identifier. "
            "If 0, the cookie will expire when the user exist its browser. "
            "Should be 0 or greater than repository\'s session-time.",
            'group':
            'web',
            'level':
            2,
        }),
        ('submit-mail', {
            'type':
            'string',
            'default':
            None,
            'help': ('Mail used as recipient to report bug in this instance, '
                     'if you want this feature on'),
            'group':
            'web',
            'level':
            2,
        }),
        ('language-mode', {
            'type':
            'choice',
            'choices': ('http-negotiation', 'url-prefix', ''),
            'default':
            'http-negotiation',
            'help':
            ('source for interface\'s language detection. '
             'If set to "http-negotiation" the Accept-Language HTTP header will be used,'
             ' if set to "url-prefix", the URL will be inspected for a'
             ' short language prefix.'),
            'group':
            'web',
            'level':
            2,
        }),
        ('print-traceback', {
            'type': 'yn',
            'default': CubicWebConfiguration.mode != 'system',
            'help':
            'print the traceback on the error page when an error occurred',
            'group': 'web',
            'level': 2,
        }),
        ('captcha-font-file', {
            'type': 'string',
            'default': join(_DATA_DIR, 'porkys.ttf'),
            'help': 'True type font to use for captcha image generation (you \
must have the python imaging library installed to use captcha)',
            'group': 'web',
            'level': 3,
        }),
        ('captcha-font-size', {
            'type': 'int',
            'default': 25,
            'help': 'Font size to use for captcha image generation (you must \
have the python imaging library installed to use captcha)',
            'group': 'web',
            'level': 3,
        }),
        ('concat-resources', {
            'type': 'yn',
            'default': False,
            'help':
            'use modconcat-like URLS to concat and serve JS / CSS files',
            'group': 'web',
            'level': 2,
        }),
        ('anonymize-jsonp-queries', {
            'type': 'yn',
            'default': True,
            'help':
            'anonymize the connection before executing any jsonp query.',
            'group': 'web',
            'level': 1
        }),
        ('generate-staticdir', {
            'type': 'yn',
            'default': False,
            'help': 'Generate the static data resource directory on upgrade.',
            'group': 'web',
            'level': 2,
        }),
        ('staticdir-path', {
            'type': 'string',
            'default': None,
            'help': 'The static data resource directory path.',
            'group': 'web',
            'level': 2,
        }),
        ('access-control-allow-origin', {
            'type':
            'csv',
            'default': (),
            'help':
            ('comma-separated list of allowed origin domains or "*" for any domain'
             ),
            'group':
            'web',
            'level':
            2,
        }),
        ('access-control-allow-methods', {
            'type': 'csv',
            'default': (),
            'help': ('comma-separated list of allowed HTTP methods'),
            'group': 'web',
            'level': 2,
        }),
        ('access-control-max-age', {
            'type': 'int',
            'default': None,
            'help':
            ('maximum age of cross-origin resource sharing (in seconds)'),
            'group': 'web',
            'level': 2,
        }),
        ('access-control-expose-headers', {
            'type':
            'csv',
            'default': (),
            'help': ('comma-separated list of HTTP headers the application '
                     'declare in response to a preflight request'),
            'group':
            'web',
            'level':
            2,
        }),
        ('access-control-allow-headers', {
            'type':
            'csv',
            'default': (),
            'help':
            ('comma-separated list of HTTP headers the application may set in the response'
             ),
            'group':
            'web',
            'level':
            2,
        }),
    ))

    def __init__(self, *args, **kwargs):
        super(WebConfiguration, self).__init__(*args, **kwargs)
        # both attributes are filled later by load_configuration()
        self.uiprops = None
        self.datadir_url = None

    def fckeditor_installed(self):
        """return True when the FCKEDITOR_PATH advertised by the loaded ui
        properties exists on the filesystem
        """
        if self.uiprops is None:
            return False
        return exists(self.uiprops.get('FCKEDITOR_PATH', ''))

    def cwproperty_definitions(self):
        """yield (key, definition) pairs, skipping 'ui.fckeditor' when the
        editor is not installed
        """
        for key, pdef in super(WebConfiguration,
                               self).cwproperty_definitions():
            if key == 'ui.fckeditor' and not self.fckeditor_installed():
                continue
            yield key, pdef

    @cachedproperty
    def _instance_salt(self):
        """This random key/salt is used to sign content to be sent back by
        browsers, eg. in the error report form.
        """
        return str(uuid4()).encode('ascii')

    def sign_text(self, text):
        """sign some text for later checking"""
        # hmac.new expect bytes
        if isinstance(text, str):
            text = text.encode('utf-8')
        # replace \r\n so we do not depend on whether a browser "reencode"
        # original message using \r\n or not
        return hmac.new(self._instance_salt,
                        text.strip().replace(b'\r\n', b'\n'),
                        digestmod="sha3_512").hexdigest()

    def check_text_sign(self, text, signature):
        """check the text signature is equal to the given signature"""
        return self.sign_text(text) == signature

    def locate_resource(self, rid):
        """return the (directory, filename) where the given resource
        may be found
        """
        return self._fs_locate(rid, 'data')

    def locate_doc_file(self, fname):
        """return the directory where the given resource may be found"""
        return self._fs_locate(fname, 'wdoc')[0]

    @cached
    def _fs_path_locate(self, rid, rdirectory):
        """return the directory where the given resource may be found"""
        # search order: instance home, then cubes, then the library itself
        path = [self.apphome] + self.cubes_path() + [dirname(__file__)]
        for directory in path:
            if exists(join(directory, rdirectory, rid)):
                return directory

    def _fs_locate(self, rid, rdirectory):
        """return the (directory, filename) where the given resource
        may be found
        """
        directory = self._fs_path_locate(rid, rdirectory)
        if directory is None:
            return None, None
        # CSS files are run through the property sheet processor and served
        # from the ui cache when 'use-uicache' is enabled
        if self['use-uicache'] and rdirectory == 'data' and rid.endswith(
                '.css'):
            return self.ensure_uid_directory(
                self.uiprops.process_resource(join(directory, rdirectory),
                                              rid)), rid
        return join(directory, rdirectory), rid

    def locate_all_files(self, rid, rdirectory='wdoc'):
        """return all files corresponding to the given resource"""
        path = [self.apphome] + self.cubes_path() + [dirname(__file__)]
        for directory in path:
            fpath = join(directory, rdirectory, rid)
            if exists(fpath):
                yield join(fpath)

    def load_configuration(self, **kw):
        """load instance's configuration files"""
        super(WebConfiguration, self).load_configuration(**kw)
        # load external resources definition
        self._init_base_url()
        self._build_ui_properties()

    def _init_base_url(self):
        """compute and normalize base-url and datadir_url"""
        # normalize base url(s)
        baseurl = self['base-url'] or self.default_base_url()
        if baseurl and baseurl[-1] != '/':
            baseurl += '/'
        if not (self.repairing or self.creating):
            self.global_set_option('base-url', baseurl)
        self.datadir_url = self['datadir-url']
        if self.datadir_url:
            # explicit datadir-url: normalize trailing slash and append the
            # instance md5 version (except in test mode) for cache busting
            if self.datadir_url[-1] != '/':
                self.datadir_url += '/'
            if self.mode != 'test':
                self.datadir_url += '%s/' % self.instance_md5_version()
            return
        # no explicit datadir-url: serve data under the base url
        data_relpath = self.data_relpath()
        self.datadir_url = baseurl + data_relpath

    def data_relpath(self):
        """return the relative path of the data directory, versioned with the
        instance md5 except in test mode
        """
        if self.mode == 'test':
            return 'data/'
        return 'data/%s/' % self.instance_md5_version()

    def _build_ui_properties(self):
        # self.datadir_url[:-1] to remove trailing /
        from cubicweb.web.propertysheet import PropertySheet
        cachedir = join(self.appdatahome, 'uicache')
        self.check_writeable_uid_directory(cachedir)
        self.uiprops = PropertySheet(cachedir,
                                     data=lambda x: self.datadir_url + x,
                                     datadir_url=self.datadir_url[:-1])
        self._init_uiprops(self.uiprops)

    def _init_uiprops(self, uiprops):
        """load ui properties from the library, the cubes, then apphome, and
        make sure cubicweb.js comes first in JAVASCRIPTS
        """
        libuiprops = join(_DATA_DIR, 'uiprops.py')
        uiprops.load(libuiprops)
        for path in reversed([self.apphome] + self.cubes_path()):
            self._load_ui_properties_file(uiprops, path)
        # NOTE(review): apphome is already in the reversed list above, so its
        # uiprops.py gets loaded twice — presumably so it always wins; confirm
        # before simplifying
        self._load_ui_properties_file(uiprops, self.apphome)
        datadir_url = uiprops.context['datadir_url']
        # datadir_url has no trailing slash here (see _build_ui_properties)
        cubicweb_js_url = datadir_url + '/cubicweb.js'
        if cubicweb_js_url not in uiprops['JAVASCRIPTS']:
            uiprops['JAVASCRIPTS'].insert(0, cubicweb_js_url)

    def _load_ui_properties_file(self, uiprops, path):
        """load <path>/uiprops.py into *uiprops* if the file exists"""
        uipropsfile = join(path, 'uiprops.py')
        if exists(uipropsfile):
            self.debug('loading %s', uipropsfile)
            uiprops.load(uipropsfile)

    # static files handling ###################################################

    @property
    def static_directory(self):
        """directory where static files are stored"""
        return join(self.appdatahome, 'static')

    def static_file_exists(self, rpath):
        """return True when *rpath* exists in the static directory"""
        return exists(join(self.static_directory, rpath))

    def static_file_open(self, rpath, mode='wb'):
        """open and return a file object for the static file at *rpath*

        NOTE(review): the default mode is 'wb' (write); intermediate
        directories are created when opening for writing.
        """
        staticdir = self.static_directory
        rdir, filename = split(rpath)
        if rdir:
            staticdir = join(staticdir, rdir)
            if not isdir(staticdir) and 'w' in mode:
                self.check_writeable_uid_directory(staticdir)
        return open(join(staticdir, filename), mode)

    def static_file_add(self, rpath, data):
        """write *data* into the static file at *rpath*"""
        stream = self.static_file_open(rpath)
        stream.write(data)
        stream.close()
        self.ensure_uid(rpath)

    def static_file_del(self, rpath):
        """remove the static file at *rpath* if it exists"""
        if self.static_file_exists(rpath):
            os.remove(join(self.static_directory, rpath))
# Exemple #15 (0) -- snippet separator from the scraped source
class PyramidStartHandler(InstanceCommand):
    """Start an interactive pyramid server.

    <instance>
      identifier of the instance to configure.
    """
    name = 'pyramid'
    actionverb = 'started'

    # command-line options, merged with the generic instance-command options
    options = merge_options((
        ('debug-mode', {
            'action':
            'store_true',
            'help':
            'Activate the repository debug mode ('
            'logs in the console and the debug toolbar).'
        }),
        ('debug', {
            'short': 'D',
            'action': 'store_true',
            'help': 'Equals to "--debug-mode --reload"'
        }),
        ('toolbar', {
            'short':
            't',
            'action':
            'store_true',
            'help':
            'Activate the pyramid debug toolbar'
            '(the pypi "pyramid_debugtoolbar" package must be installed)'
        }),
        ('reload', {
            'action': 'store_true',
            'help': 'Restart the server if any source file is changed'
        }),
        ('reload-interval', {
            'type': 'int',
            'default': 1,
            'help': 'Interval, in seconds, between file modifications checks'
        }),
        ('profile', {
            'action': 'store_true',
            'default': False,
            'help': 'Enable profiling'
        }),
        ('profile-output', {
            'type': 'string',
            'default': None,
            'help': 'Profiling output file (default: "program.prof")'
        }),
        ('profile-dump-every', {
            'type':
            'int',
            'default':
            None,
            'metavar':
            'N',
            'help':
            'Dump profile stats to ouput every N requests '
            '(default: 100)'
        }),
        ('param', {
            'short': 'p',
            'type': 'named',
            'metavar': 'key1:value1,key2:value2',
            'default': {},
            'help': 'override <key> configuration file option with <value>.'
        }),
    ) + InstanceCommand.options)

    # environment variable marking a child process spawned by the reloader
    _reloader_environ_key = 'CW_RELOADER_SHOULD_RUN'

    def debug(self, msg):
        """print *msg* on stdout with a DEBUG prefix"""
        print('DEBUG - %s' % msg)

    def info(self, msg):
        """print *msg* on stdout with an INFO prefix"""
        print('INFO - %s' % msg)

    def quote_first_command_arg(self, arg):
        """
        There's a bug in Windows when running an executable that's
        located inside a path with a space in it.  This method handles
        that case, or on non-Windows systems or an executable with no
        spaces, it just leaves well enough alone.
        """
        if (sys.platform != 'win32' or ' ' not in arg):
            # Problem does not apply:
            return arg
        try:
            import win32api
        except ImportError:
            raise ValueError(
                "The executable %r contains a space, and in order to "
                "handle this issue you must have the win32api module "
                "installed" % arg)
        arg = win32api.GetShortPathName(arg)
        return arg

    def restart_with_reloader(self, filelist_path):
        """run the server in a monitored subprocess and restart it when needed

        The child requests a restart by exiting with code 3; any other exit
        code stops the loop, possibly after waiting for one of the monitored
        files (listed in *filelist_path*) to change.
        """
        self.debug('Starting subprocess with file monitor')

        # Create or clear monitored files list file.
        with open(filelist_path, 'w') as f:
            pass

        while True:
            args = [self.quote_first_command_arg(sys.executable)] + sys.argv
            new_environ = os.environ.copy()
            # tell the child not to spawn its own reloader process
            new_environ[self._reloader_environ_key] = 'true'
            proc = None
            try:
                try:
                    proc = subprocess.Popen(args, env=new_environ)
                    exit_code = proc.wait()
                    proc = None
                    print("Process exited with", exit_code)
                except KeyboardInterrupt:
                    self.info('^C caught in monitor process')
                    return 1
            finally:
                if proc is not None:
                    proc.terminate()
                    self.info(
                        'Waiting for the server to stop. Hit CTRL-C to exit')
                    exit_code = proc.wait()

            if exit_code != 3:
                with open(filelist_path) as f:
                    filelist = [line.strip() for line in f]
                if filelist:
                    self.info("Reloading failed. Waiting for a file to change")
                    mon = Monitor(extra_files=filelist, nomodules=True)
                    while mon.check_reload():
                        time.sleep(1)
                else:
                    return exit_code

            self.info('%s %s %s' % ('-' * 20, 'Restarting', '-' * 20))

    def set_needreload(self):
        """flag this process as needing a reload (used as monitor callback)"""
        self._needreload = True

    def install_reloader(self, poll_interval, extra_files, filelist_path):
        """start a daemon thread polling *extra_files* for modifications"""
        mon = Monitor(poll_interval=poll_interval,
                      extra_files=extra_files,
                      atexit=self.set_needreload,
                      filelist_path=filelist_path)
        mon_thread = threading.Thread(target=mon.periodic_reload)
        mon_thread.daemon = True
        mon_thread.start()

    def configfiles(self, cwconfig):
        """Generate instance configuration filenames"""
        yield cwconfig.main_config_file()
        for f in ('sources', 'logging.conf', 'pyramid.ini',
                  'pyramid-debug.ini'):
            f = os.path.join(cwconfig.apphome, f)
            if os.path.exists(f):
                yield f

    def i18nfiles(self, cwconfig):
        """Generate instance i18n files"""
        i18ndir = os.path.join(cwconfig.apphome, 'i18n')
        if os.path.exists(i18ndir):
            for lang in cwconfig.available_languages():
                f = os.path.join(i18ndir, lang, 'LC_MESSAGES', 'cubicweb.mo')
                if os.path.exists(f):
                    yield f

    def pyramid_instance(self, appid):
        """serve the instance *appid* with waitress

        Returns 3 when the file monitor requested a reload (interpreted by
        restart_with_reloader as "restart me"), 0 otherwise.
        """
        self._needreload = False

        autoreload = self['reload'] or self['debug']

        cwconfig = self.cwconfig
        filelist_path = os.path.join(cwconfig.apphome,
                                     '.pyramid-reload-files.list')

        pyramid_ini_path = os.path.join(cwconfig.apphome, "pyramid.ini")
        if not os.path.exists(pyramid_ini_path):
            _generate_pyramid_ini_file(pyramid_ini_path)

        # first pass: re-exec ourselves under the monitor process
        if autoreload and not os.environ.get(self._reloader_environ_key):
            return self.restart_with_reloader(filelist_path)

        if autoreload:
            _turn_sigterm_into_systemexit()
            self.debug('Running reloading file monitor')
            extra_files = [sys.argv[0]]
            extra_files.extend(self.configfiles(cwconfig))
            extra_files.extend(self.i18nfiles(cwconfig))
            self.install_reloader(self['reload-interval'],
                                  extra_files,
                                  filelist_path=filelist_path)

        # if no loglevel is specified and --debug is here, set log level at debug
        if self['loglevel'] is None and self['debug']:
            init_cmdline_log_threshold(self.cwconfig, 'debug')

        # if the debugtoolbar is activated, test if it's importable
        if self['toolbar']:
            try:
                import pyramid_debugtoolbar  # noqa
            except ImportError:
                print(
                    "Error: you've tried to activate the pyramid debugtoolbar but it failed to "
                    "import, make sure it's correctly installed by doing a "
                    "'pip install pyramid_debugtoolbar'.\nYou can find more information on the "
                    "official documentation: "
                    "https://docs.pylonsproject.org/projects/pyramid_debugtoolbar/en/latest/"
                )
                sys.exit(1)

        if self['debug']:
            # this is for injecting those into generated html:
            # > cubicweb-generated-by="module.Class" cubicweb-from-source="/path/to/file.py:42"
            inject_html_generating_call_on_w()

        app = wsgi_application_from_cwconfig(
            cwconfig,
            profile=self['profile'],
            profile_output=self['profile-output'],
            profile_dump_every=self['profile-dump-every'],
            debugtoolbar=self['toolbar'])

        host = cwconfig['interface']
        port = cwconfig['port'] or 8080
        url_scheme = ('https'
                      if cwconfig['base-url'].startswith('https') else 'http')
        repo = app.application.registry['cubicweb.repository']
        try:
            waitress.serve(app,
                           host=host,
                           port=port,
                           url_scheme=url_scheme,
                           clear_untrusted_proxy_headers=True)
        finally:
            repo.shutdown()
        if self._needreload:
            return 3
        return 0
# Exemple #16 (0) -- snippet separator from the scraped source
    'named',
    'metavar':
    '[section1.]key1:value1,[section2.]key2:value2',
    'default':
    None,
    'help':
    '''set <key> in <section> to <value> in "source" configuration file. If
<section> is not specified, it defaults to "system".

Beware that changing admin.login or admin.password using this command
will NOT update the database with new admin credentials.  Use the
reset-admin-pwd command instead.
''',
}), )

# extend the generic 'configure' command options with the database options
ConfigureInstanceCommand.options = merge_options(
    ConfigureInstanceCommand.options + db_options)

# keep a reference to the original implementation so a wrapper can still
# delegate to it after the command class is patched
configure_instance = ConfigureInstanceCommand.configure_instance


def configure_instance2(self, appid):
    configure_instance(self, appid)
    if self.config.db is not None:
        appcfg = ServerConfiguration.config_for(appid)
        srccfg = appcfg.read_sources_file()
        for key, value in self.config.db.items():
            if '.' in key:
                section, key = key.split('.', 1)
            else:
                section = 'system'
            try:
# Exemple #17 (0) -- snippet separator from the scraped source
def register_persistent_options(options):
    """Register *options* into the global PERSISTENT_OPTIONS registry,
    merging them with the already registered options.
    """
    global PERSISTENT_OPTIONS
    PERSISTENT_OPTIONS = merge_options(PERSISTENT_OPTIONS + options)
# Exemple #18 (0) -- snippet separator from the scraped source
class ServerConfiguration(CubicWebConfiguration):
    """standalone RQL server"""
    name = 'repository'

    cubicweb_appobject_path = CubicWebConfiguration.cubicweb_appobject_path | set(
        ['sobjects', 'hooks'])
    cube_appobject_path = CubicWebConfiguration.cube_appobject_path | set(
        ['sobjects', 'hooks'])

    # repository-specific options, appended before the common CubicWeb options
    options = lgconfig.merge_options((
        # ctl configuration
        ('host', {
            'type': 'string',
            'default': None,
            'help':
            'host name if not correctly detectable through gethostname',
            'group': 'main',
            'level': 1,
        }),
        ('pid-file', {
            'type': 'string',
            'default': lgconfig.Method('default_pid_file'),
            'help': 'repository\'s pid file',
            'group': 'main',
            'level': 2,
        }),
        ('uid', {
            'type': 'string',
            'default': None,
            'help':
            'unix user, if this option is set, use the specified user to start \
the repository rather than the user running the command',
            'group': 'main',
            'level': (CubicWebConfiguration.mode == 'installed') and 0 or 1,
        }),
        ('cleanup-session-time', {
            'type':
            'time',
            'default':
            '24h',
            'help':
            'duration of inactivity after which a session '
            'will be closed, to limit memory consumption (avoid sessions that '
            'never expire and cause memory leak when http-session-time is 0, or '
            'because of bad client that never closes their connection). '
            'So notice that even if http-session-time is 0 and the user don\'t '
            'close his browser, he will have to reauthenticate after this time '
            'of inactivity. Default to 24h.',
            'group':
            'main',
            'level':
            3,
        }),
        ('connections-pooler-enabled', {
            'type': 'yn',
            'default': True,
            'help': 'enable the connection pooler',
            'group': 'main',
            'level': 3,
        }),
        ('connections-pool-size', {
            'type': 'int',
            'default': 4,
            'help':
            'size of the connections pool. Each source supporting multiple \
connections will have this number of opened connections.',
            'group': 'main',
            'level': 3,
        }),
        ('rql-cache-size', {
            'type': 'int',
            'default': 3000,
            'help': 'size of the parsed rql cache size.',
            'group': 'main',
            'level': 3,
        }),
        ('undo-enabled', {
            'type': 'yn',
            'default': False,
            'help': 'enable undo support',
            'group': 'main',
            'level': 3,
        }),
        ('keep-transaction-lifetime', {
            'type': 'int',
            'default': 7,
            'help':
            'number of days during which transaction records should be \
kept (hence undoable).',
            'group': 'main',
            'level': 3,
        }),
        ('multi-sources-etypes', {
            'type': 'csv',
            'default': (),
            'help': 'defines which entity types from this repository are used \
by some other instances. You should set this properly for these instances to \
detect updates / deletions.',
            'group': 'main',
            'level': 3,
        }),
        ('delay-full-text-indexation', {
            'type':
            'yn',
            'default':
            False,
            'help':
            'When full text indexation of entity has a too important cost'
            ' to be done when entity are added/modified by users, activate this '
            'option and setup a job using cubicweb-ctl db-rebuild-fti on your '
            'system (using cron for instance).',
            'group':
            'main',
            'level':
            3,
        }),

        # email configuration
        ('default-recipients-mode', {
            'type': 'choice',
            'choices': ('default-dest-addrs', 'users', 'none'),
            'default': 'default-dest-addrs',
            'help':
            'when a notification should be sent with no specific rules \
to find recipients, recipients will be found according to this mode. Available \
modes are "default-dest-addrs" (emails specified in the configuration \
variable with the same name), "users" (every users which has activated \
account with an email set), "none" (no notification).',
            'group': 'email',
            'level': 2,
        }),
        ('default-dest-addrs', {
            'type': 'csv',
            'default': (),
            'help':
            'comma separated list of email addresses that will be used \
as default recipient when an email is sent and the notification has no \
specific recipient rules.',
            'group': 'email',
            'level': 2,
        }),
        ('supervising-addrs', {
            'type': 'csv',
            'default': (),
            'help': 'comma separated list of email addresses that will be \
notified of every changes.',
            'group': 'email',
            'level': 2,
        }),
        ('zmq-address-sub', {
            'type':
            'csv',
            'default': (),
            'help': ('List of ZMQ addresses to subscribe to (requires pyzmq) '
                     '(of the form `tcp://<ipaddr>:<port>`)'),
            'group':
            'zmq',
            'level':
            1,
        }),
        ('zmq-address-pub', {
            'type':
            'string',
            'default':
            None,
            'help': ('ZMQ address to use for publishing (requires pyzmq) '
                     '(of the form `tcp://<ipaddr>:<port>`)'),
            'group':
            'zmq',
            'level':
            1,
        }),
    ) + CubicWebConfiguration.options)

    # read the schema from the database
    read_instance_schema = True
    # set this to true to get a minimal repository, for instance to get cubes
    # information on commands such as i18ninstance, db-restore, etc...
    quick_start = False
    # check user's state at login time
    consider_user_state = True

    # should some hooks be deactivated during [pre|post]create script execution
    free_wheel = False

    # list of enabled sources when sources restriction is necessary
    # (eg repository initialization at least)
    enabled_sources = None

    def bootstrap_cubes(self):
        """initialize cubes from the instance's 'bootstrap_cubes' file

        The first non-blank, non-comment line is expected to hold the
        comma-separated cube list; when the file contains none, the instance
        is initialized with no cubes at all.
        """
        from logilab.common.textutils import splitstrip
        with open(join(self.apphome, 'bootstrap_cubes')) as stream:
            for raw_line in stream:
                stripped = raw_line.strip()
                if not stripped or stripped.startswith('#'):
                    continue
                self.init_cubes(self.expand_cubes(splitstrip(stripped)))
                return
        # only blank/comment lines found: no cubes
        self.init_cubes(())

    def write_bootstrap_cubes_file(self, cubes):
        """write the 'bootstrap_cubes' file into apphome, holding the given
        comma-separated cube list

        Uses a context manager so the file is properly closed even when a
        write fails (the previous code leaked the file object on error).
        """
        with open(join(self.apphome, 'bootstrap_cubes'), 'w') as stream:
            stream.write('# this is a generated file only used for bootstraping\n')
            stream.write('# you should not have to edit this\n')
            stream.write('%s\n' % ','.join(cubes))

    def sources_file(self):
        """return the path of the instance's sources configuration file"""
        return join(self.apphome, 'sources')

    # this method has to be cached since when the server is running using a
    # restricted user, this user usually doesn't have access to the sources
    # configuration file (#16102)
    @cached
    def read_sources_file(self):
        """return a dictionary of values found in the sources file"""
        return read_config(self.sources_file(), raise_if_unreadable=True)

    @property
    def system_source_config(self):
        """return the 'system' section of the sources file"""
        return self.read_sources_file()['system']

    @property
    def default_admin_config(self):
        """return the 'admin' section of the sources file"""
        return self.read_sources_file()['admin']

    def source_enabled(self, source):
        """tell whether the given source should be connected to, according to
        the current sources mode, the quick-start flag and the enabled
        sources restriction
        """
        modes = self.sources_mode
        if modes is not None:
            if 'migration' in modes:
                assert len(modes) == 1
                if not source.connect_for_migration:
                    print('not connecting to source', source.uri,
                          'during migration')
                    return False
                return True
            if 'all' in modes:
                assert len(modes) == 1
                return True
            return source.uri in modes
        if self.quick_start:
            # minimal repository: only the system source is used
            return source.uri == 'system'
        if source.disabled:
            return False
        return not self.enabled_sources or source.uri in self.enabled_sources

    def write_sources_file(self, sourcescfg):
        """serialize the repository's sources configuration into an INI-like
        file

        *sourcescfg* is a mapping with 'admin' and 'system' sections; the
        'system' section may be a plain dict, in which case it is converted
        to a SourceConfiguration first.  An existing sources file is backed
        up with a '.bak' suffix and the written file's permissions are
        restricted to the current user.
        """
        sourcesfile = self.sources_file()
        if exists(sourcesfile):
            import shutil
            shutil.copy(sourcesfile, sourcesfile + '.bak')
        # context manager ensures the file is closed even if serialization
        # fails (the previous code leaked the file object on error)
        with open(sourcesfile, 'w') as stream:
            for section in ('admin', 'system'):
                sconfig = sourcescfg[section]
                if isinstance(sconfig, dict):
                    # get a Configuration object
                    assert section == 'system', '%r is not system' % section
                    _sconfig = SourceConfiguration(
                        self, options=SOURCE_TYPES['native'].options)
                    for attr, val in sconfig.items():
                        try:
                            _sconfig.set_option(attr, val)
                        except lgconfig.OptionError:
                            # skip adapter, may be present on pre 3.10 instances
                            if attr != 'adapter':
                                self.error(
                                    'skip unknown option %s in sources file' %
                                    attr)
                    sconfig = _sconfig
                stream.write('[%s]\n%s\n' %
                             (section, generate_source_config(sconfig)))
        restrict_perms_to_user(sourcesfile)

    def load_schema(self, expand_cubes=False, **kwargs):
        """load and return the instance schema from the filesystem

        When *expand_cubes* is true, cubes are temporarily reinitialized
        with their expanded dependencies so the full filesystem schema is
        read, then the original cube list is restored.
        """
        from cubicweb.schema import CubicWebSchemaLoader
        if expand_cubes:
            # in case some new dependencies have been introduced, we have to
            # reinitialize cubes so the full filesystem schema is read
            origcubes = self.cubes()
            self._cubes = None
            self.init_cubes(self.expand_cubes(origcubes))
        schema = CubicWebSchemaLoader().load(self, **kwargs)
        if expand_cubes:
            # restore original value
            self._cubes = origcubes
        return schema

    def load_bootstrap_schema(self):
        """load and return the minimal bootstrap schema"""
        from cubicweb.schema import BootstrapSchemaLoader
        schema = BootstrapSchemaLoader().load(self)
        schema.name = 'bootstrap'
        return schema

    # when not None, restricts which sources are considered by source_enabled()
    sources_mode = None

    def set_sources_mode(self, sources):
        """set the active sources mode (used by source_enabled())"""
        self.sources_mode = sources

    def migration_handler(self,
                          schema=None,
                          interactive=True,
                          cnx=None,
                          repo=None,
                          connect=True,
                          verbosity=None):
        """return a migration handler instance

        When *verbosity* is None, fall back to this configuration's own
        `verbosity` attribute (0 when unset).
        """
        from cubicweb.server.migractions import ServerMigrationHelper
        if verbosity is None:
            verbosity = getattr(self, 'verbosity', 0)
        return ServerMigrationHelper(self,
                                     schema,
                                     interactive=interactive,
                                     cnx=cnx,
                                     repo=repo,
                                     connect=connect,
                                     verbosity=verbosity)