Example #1
0
    async def initialize_catalog(self, site):
        """Create and configure the elasticsearch catalog for *site*.

        Any previous catalog is removed first.  A versioned physical index
        (``<name>_<version>``) is created and exposed through the alias
        ``<name>``; settings and mappings are applied while the index is
        closed, then it is reopened and recorded on the site.

        Elasticsearch problems are logged and tolerated (best effort) so
        that site creation can proceed even when the search backend is
        unreachable or the index already exists.
        """
        await self.remove_catalog(site)
        mappings = get_mappings()
        index_name = self.get_index_name(site)
        version = self.get_version(site)
        # The alias points at a versioned physical index so that future
        # migrations can swap indexes without renaming the alias.
        real_index_name = index_name + '_' + str(version)
        index_settings = DEFAULT_SETTINGS.copy()
        index_settings.update(app_settings.get('index', {}))

        async def _best_effort(coro):
            # Shared error handling for the setup calls below: log
            # transport/connection trouble, silently ignore request errors
            # (e.g. the index/alias already exists) and keep going.
            try:
                await coro
            except TransportError as e:
                # logger.warn is a deprecated alias; use warning()
                logger.warning('Transport Error', exc_info=e)
            except ConnectionError:
                logger.warning('elasticsearch not installed', exc_info=True)
            except RequestError:
                pass

        await _best_effort(self.conn.indices.create(real_index_name))
        await _best_effort(
            self.conn.indices.put_alias(index_name, real_index_name))
        # Close the index so settings/mappings can be applied below.
        await _best_effort(self.conn.indices.close(index_name))

        try:
            await self.conn.indices.put_settings(index_settings, index_name)
            for key, value in mappings.items():
                await self.conn.indices.put_mapping(index_name, key, value)
        except TransportError as e:
            # Fall through and still try to open the index, matching the
            # tolerant behavior of the calls above.
            logger.warning('Transport Error', exc_info=e)
        except ConnectionError:
            # Without a backend there is nothing to open/record: bail out.
            logger.warning('elasticsearch not installed', exc_info=True)
            return
        except RequestError:
            return
        await self.conn.indices.open(index_name)
        await self.conn.cluster.health(wait_for_status='yellow')
        self.set_index_name(site, index_name)
Example #2
0
def get_mappings():
    """Compute elasticsearch mappings for every registered resource type.

    Returns a dict keyed by type name; each value is an elasticsearch
    ``{'properties': {field: mapping}}`` dict derived from the type's
    catalog index fields.  Per-field mappings come from the catalog info
    (``field_mapping`` or a ``CATALOG_TYPES`` lookup by ``type``), with
    overrides from ``app_settings['elasticsearch']['mapping_overrides']``
    taking precedence — the ``'*'`` key applies to every type, and a
    type-specific entry wins over the wildcard.
    """
    from plone.server import app_settings
    es_config = app_settings.get('elasticsearch', {})
    mapping_overrides = es_config.get('mapping_overrides', {})
    wildcard_overrides = mapping_overrides.get('*', {})

    global_mappings = {}
    for name, schema in getUtilitiesFor(IResourceFactory):
        # Effective overrides for this type: wildcard first, then specific.
        overrides = wildcard_overrides.copy()
        overrides.update(mapping_overrides.get(name, {}))

        properties = {}
        for field_name, catalog_info in get_index_fields(name).items():
            if field_name in overrides:
                # An override wins regardless of the computed mapping.
                field_mapping = overrides[field_name]
            else:
                field_mapping = catalog_info.get('field_mapping')
                if field_mapping is None:
                    field_mapping = CATALOG_TYPES[
                        catalog_info.get('type', 'text')]
            properties[field_name] = field_mapping
        global_mappings[name] = {'properties': properties}
    return global_mappings
Example #3
0
    async def migrate_index(self, site, sndstep=False):
        """Migrate the site catalog to the next versioned elasticsearch index.

        The canonical index name is an *alias*; physical indexes are
        versioned as ``<alias>_<version>``.  The migration has two phases:

        1. (``sndstep=False``) create the next-version index
           ``<alias>_<v+1>`` plus a temporary write index
           ``<alias>_<v+1>_t``, point the alias at the temp index so new
           writes are captured, and bulk-reindex the old physical index
           into the new version.
        2. Flip the alias from the temp index to the new version, drain
           the temp index into it, bump the stored version, and delete the
           old and temp indexes.

        :param site: site whose catalog is migrated.
        :param sndstep: when True, skip phase 1 and run only phase 2 —
            presumably to resume a migration whose first phase already
            completed.  NOTE(review): confirm with callers.
        """
        index_name = self.get_index_name(site)  # the alias, not a physical index
        version = self.get_version(site)
        mappings = get_mappings()
        index_settings = DEFAULT_SETTINGS.copy()
        index_settings.update(app_settings.get('index', {}))
        next_version = version + 1
        # Physical index names for the current version, the next version,
        # and the temporary index that captures writes during migration.
        real_index_name = index_name + '_' + str(version)
        real_index_name_next_version = index_name + '_' + str(next_version)
        temp_index = index_name + '_' + str(next_version) + '_t'

        # Create and setup the new index
        # exists = await self.conn.indices.exists(index_name)
        # if exists:
        #     logger.warn('Canonical index exist')
        #     await self.conn.indices.delete(index_name)

        # Create and setup the new index
        if sndstep is False:
            # A leftover next-version index means an earlier migration
            # failed; drop it and start phase 1 from scratch.
            exists = await self.conn.indices.exists(real_index_name_next_version)
            if exists:
                logger.warn('New version exist')
                await self.conn.indices.delete(real_index_name_next_version)

            exists = await self.conn.indices.exists(temp_index)
            # Raw transport connection: its private aiohttp session is used
            # below to hit the _aliases/_reindex endpoints directly.
            # NOTE(review): presumably the client version in use lacks
            # helpers for these APIs — confirm before upgrading the client.
            conn_es = await self.conn.transport.get_connection()

            if exists:
                # There is a temp index so it needs to be reindex to the old one
                # Its been a failing reindexing
                logger.warn('Temp index exist')
                # Move aliases: point the alias back at the current physical
                # index before draining the stale temp index into it.
                body = {
                    "actions": [
                        {"remove": {
                            "alias": index_name,
                            "index": temp_index
                        }},
                        {"add": {
                            "alias": index_name,
                            "index": real_index_name
                        }}
                    ]
                }
                conn_es = await self.conn.transport.get_connection()
                async with conn_es._session.post(
                            conn_es._base_url + '_aliases',
                            data=json.dumps(body),
                            timeout=1000000
                        ) as resp:
                    pass
                # Drain the stale temp index into the current physical
                # index, then delete it so phase 1 can recreate it.
                body = {
                  "source": {
                    "index": temp_index
                  },
                  "dest": {
                    "index": real_index_name
                  }
                }
                async with conn_es._session.post(
                            conn_es._base_url + '_reindex',
                            data=json.dumps(body)
                        ) as resp:
                    pass
                await self.conn.indices.delete(temp_index)

            # Create the temp write index and the next-version index; the
            # latter is closed while settings and mappings are applied.
            await self.conn.indices.create(temp_index)
            await self.conn.indices.create(real_index_name_next_version)
            await self.conn.indices.close(real_index_name_next_version)
            await self.conn.indices.put_settings(
                index_settings, real_index_name_next_version)
            for key, value in mappings.items():
                await self.conn.indices.put_mapping(
                    real_index_name_next_version, key, value)
            await self.conn.indices.open(real_index_name_next_version)

            # Start to duplicate aliases: divert writes to the temp index
            # while the bulk reindex below runs against the old index.
            body = {
                "actions": [
                    {"remove": {
                        "alias": index_name,
                        "index": real_index_name
                    }},
                    {"add": {
                        "alias": index_name,
                        "index": temp_index
                    }}
                ]
            }

            async with conn_es._session.post(
                        conn_es._base_url + '_aliases',
                        data=json.dumps(body),
                        timeout=1000000
                    ) as resp:
                pass
            logger.warn('Updated aliases')

            # Reindex: bulk-copy the old physical index into the new
            # version (very large timeout — can take a long time).
            body = {
              "source": {
                "index": real_index_name
              },
              "dest": {
                "index": real_index_name_next_version
              }
            }
            conn_es = await self.conn.transport.get_connection()
            async with conn_es._session.post(
                        conn_es._base_url + '_reindex',
                        data=json.dumps(body),
                        timeout=10000000
                    ) as resp:
                pass
            logger.warn('Reindexed')

        # Phase 2 — move aliases: flip the alias from the temp index to
        # the new version so reads and writes hit the migrated index.
        body = {
            "actions": [
                {"remove": {
                    "alias": index_name,
                    "index": temp_index
                }},
                {"add": {
                    "alias": index_name,
                    "index": real_index_name_next_version
                }}
            ]
        }
        conn_es = await self.conn.transport.get_connection()
        async with conn_es._session.post(
                    conn_es._base_url + '_aliases',
                    data=json.dumps(body),
                    timeout=1000000
                ) as resp:
            pass
        logger.warn('Updated aliases')
        self.set_version(site, next_version)

        # Reindex: drain writes captured by the temp index during the
        # migration into the new version.
        body = {
          "source": {
            "index": temp_index
          },
          "dest": {
            "index": real_index_name_next_version
          }
        }
        async with conn_es._session.post(
                    conn_es._base_url + '_reindex',
                    data=json.dumps(body)
                ) as resp:
            pass
        logger.warn('Reindexed temp')

        # Delete old index
        await self.conn.indices.close(real_index_name)
        await self.conn.indices.delete(real_index_name)
        await self.conn.indices.close(temp_index)
        await self.conn.indices.delete(temp_index)
Example #4
0
 def settings(self):
     """Return the effective mailer settings.

     Global defaults from ``app_settings['mailer']`` are overlaid with
     this utility's own ``self._settings['mailer']`` overrides.
     """
     # Copy first: calling .update() directly on the dict returned by
     # app_settings.get() would mutate the shared global settings in
     # place, leaking per-instance overrides into every other consumer.
     settings = dict(app_settings.get('mailer', {}))
     settings.update(self._settings.get('mailer', {}))
     return settings
Example #5
0
def make_app(config_file=None, settings=None):
    """Create and configure the plone.server aiohttp application.

    Boot order matters here: defaults → ZCA machinery → configuration
    load → component scans → entry-point applications → execute ZCA
    actions → merge runtime settings → databases/static/negotiation →
    async utilities.

    :param config_file: path to a JSON configuration file; when given it
        is loaded into ``settings`` (overriding any passed dict).
    :param settings: configuration dict used when no config file is given.
    :raises Exception: when neither ``config_file`` nor ``settings`` is
        provided.
    :return: the configured ``aiohttp.web.Application``.
    """
    app_settings.update(_delayed_default_settings)

    # Initialize aiohttp app
    app = web.Application(router=TraversalRouter())

    # Create root Application
    root = ApplicationRoot(config_file)
    root.app = app
    provideUtility(root, IApplication, 'root')

    # Initialize global (threadlocal) ZCA configuration
    app.config = ConfigurationMachine()
    registerCommonDirectives(app.config)

    # A config file takes precedence over an explicit settings dict.
    if config_file is not None:
        with open(config_file, 'r') as config:
            settings = json.load(config)
    elif settings is None:
        raise Exception('Neither configuration or settings')

    # Register core plone.server components (ZCML + venusian scans).
    import plone.server
    configure.include("zope.component")
    configure.include("zope.annotation")
    configure.include("plone.server", "meta.zcml")  # bbb
    configure.scan('plone.server.translation')
    configure.scan('plone.server.renderers')
    configure.scan('plone.server.api')
    configure.scan('plone.server.content')
    configure.scan('plone.server.security')
    configure.scan('plone.server.json')
    configure.scan('plone.server.behaviors')
    configure.scan('plone.server.languages')
    configure.scan('plone.server.permissions')
    configure.scan('plone.server.migrate.migrations')
    configure.scan('plone.server.auth.participation')
    configure.scan('plone.server.auth.principalrole')
    configure.scan('plone.server.catalog.index')
    configure.scan('plone.server.catalog.catalog')
    configure.scan('plone.server.framing')
    configure.scan('plone.server.file')
    configure.scan('plone.server.types')
    load_application(plone.server, root, settings)

    for ep in iter_entry_points('plone.server'):
        # auto-include applications
        # What an "app" include consists of...
        # 1. load zcml if present
        # 2. load "includeme" module function if present
        # 3. load app_settings dict if present in the module
        if ep.module_name not in settings.get('applications', []):
            continue

        load_application(ep.load(), root, settings)
    try:
        app.config.execute_actions()
    except ConfigurationConflictError as e:
        logger.error(str(e._conflicts))
        raise e

    # XXX we clear now to save some memory
    # it's unclear to me if this is necesary or not but it seems to me that
    # we don't need things registered in both components AND here.
    configure.clear()

    # update *after* plugins loaded, so user settings override plugin
    # defaults contributed via load_application above.
    update_app_settings(settings)

    content_type = ContentNegotiatorUtility(
        'content_type', app_settings['renderers'].keys())
    language = ContentNegotiatorUtility(
        'language', app_settings['languages'].keys())

    provideUtility(content_type, IContentNegotiation, 'content_type')
    provideUtility(language, IContentNegotiation, 'language')

    # Mount each configured database under the root by its key.
    for database in app_settings['databases']:
        for key, dbconfig in database.items():
            factory = getUtility(
                IDatabaseConfigurationFactory, name=dbconfig['storage'])
            root[key] = factory(key, dbconfig)

    for static in app_settings['static']:
        for key, file_path in static.items():
            root[key] = StaticFile(file_path)

    root.set_root_user(app_settings['root_user'])

    # Generate an RSA key pair for token signing when PyCrypto-style RSA
    # is importable and no key was configured.
    if RSA is not None and not app_settings.get('rsa'):
        key = RSA.generate(2048)
        pub_jwk = {'k': key.publickey().exportKey('PEM')}
        priv_jwk = {'k': key.exportKey('PEM')}
        app_settings['rsa'] = {
            'pub': pub_jwk,
            'priv': priv_jwk
        }

    # Set router root
    app.router.set_root(root)

    for utility in getAllUtilitiesRegisteredFor(IAsyncUtility):
        # In case there is Utilties that are registered from zcml
        ident = asyncio.ensure_future(utility.initialize(app=app), loop=app.loop)
        root.add_async_utility(ident, {})

    app.on_cleanup.append(close_utilities)

    for util in app_settings['utilities']:
        root.add_async_utility(util)

    # Load cached Schemas
    load_cached_schema()

    return app
Example #6
0
def update_app_settings(settings):
    """Merge *settings* into the global ``app_settings``.

    Entries whose existing value in ``app_settings`` is a dict are
    shallow-merged in place; all other entries are overwritten.
    """
    for key in settings:
        value = settings[key]
        existing = app_settings.get(key)
        if isinstance(existing, dict):
            existing.update(value)
        else:
            app_settings[key] = value
Example #7
0
def make_app(config_file=None, settings=None):
    """Create and configure the plone.server aiohttp application.

    :param config_file: path to a JSON configuration file; when given it
        is loaded and takes precedence over the ``settings`` argument.
    :param settings: configuration dict used when no config file is given.
    :raises Exception: when neither ``config_file`` nor ``settings`` is
        provided.
    :return: the configured ``aiohttp.web.Application``.
    """
    app_settings.update(_delayed_default_settings)

    # Resolve the configuration *before* its first use below.  Previously
    # settings.get('aiohttp_settings') ran while ``settings`` could still
    # be None (config_file-only invocation), raising AttributeError.
    if config_file is not None:
        with open(config_file, 'r') as config:
            settings = json.load(config)
    elif settings is None:
        raise Exception('Neither configuration or settings')

    # Initialize aiohttp app
    app = web.Application(
        router=TraversalRouter(),
        **settings.get('aiohttp_settings', {}))

    # Create root Application
    root = ApplicationRoot(config_file)
    root.app = app
    provideUtility(root, IApplication, 'root')

    # Initialize global (threadlocal) ZCA configuration
    app.config = ConfigurationMachine()
    registerCommonDirectives(app.config)

    # Register core plone.server components (ZCML + venusian scans).
    import plone.server
    configure.include("zope.component")
    configure.include("zope.annotation")
    configure.include("plone.server", "meta.zcml")  # bbb
    configure.scan('plone.server.translation')
    configure.scan('plone.server.renderers')
    configure.scan('plone.server.api')
    configure.scan('plone.server.content')
    configure.scan('plone.server.auth')
    configure.scan('plone.server.json')
    configure.scan('plone.server.behaviors')
    configure.scan('plone.server.languages')
    configure.scan('plone.server.permissions')
    configure.scan('plone.server.migrate.migrations')
    configure.scan('plone.server.auth.checker')
    configure.scan('plone.server.auth.security_local')
    configure.scan('plone.server.auth.policy')
    configure.scan('plone.server.auth.participation')
    configure.scan('plone.server.catalog.index')
    configure.scan('plone.server.catalog.catalog')
    configure.scan('plone.server.framing')
    configure.scan('plone.server.file')
    configure.scan('plone.server.types')
    load_application(plone.server, root, settings)

    for ep in iter_entry_points('plone.server'):
        # auto-include applications
        # What an "app" include consists of...
        # 1. load zcml if present
        # 2. load "includeme" module function if present
        # 3. load app_settings dict if present in the module
        if ep.module_name not in settings.get('applications', []):
            continue

        load_application(ep.load(), root, settings)
    try:
        app.config.execute_actions()
    except ConfigurationConflictError as e:
        logger.error(str(e._conflicts))
        raise e

    # XXX we clear now to save some memory
    # it's unclear to me if this is necesary or not but it seems to me that
    # we don't need things registered in both components AND here.
    configure.clear()

    # update *after* plugins loaded, so user settings override plugin
    # defaults contributed via load_application above.
    update_app_settings(settings)

    content_type = ContentNegotiatorUtility(
        'content_type', app_settings['renderers'].keys())
    language = ContentNegotiatorUtility(
        'language', app_settings['languages'].keys())

    provideUtility(content_type, IContentNegotiation, 'content_type')
    provideUtility(language, IContentNegotiation, 'language')

    # Mount each configured database under the root by its key.
    for database in app_settings['databases']:
        for key, dbconfig in database.items():
            factory = getUtility(
                IDatabaseConfigurationFactory, name=dbconfig['storage'])
            root[key] = factory(key, dbconfig)

    # Static entries may point at a single file or a whole directory.
    for static in app_settings['static']:
        for key, file_path in static.items():
            path = pathlib.Path(file_path)
            if path.is_dir():
                root[key] = StaticDirectory(path)
            else:
                root[key] = StaticFile(path)

    root.set_root_user(app_settings['root_user'])

    # Generate an RSA key pair for token signing when the RSA module is
    # importable and no key was configured.
    if RSA is not None and not app_settings.get('rsa'):
        key = RSA.generate(2048)
        pub_jwk = {'k': key.publickey().exportKey('PEM')}
        priv_jwk = {'k': key.exportKey('PEM')}
        app_settings['rsa'] = {
            'pub': pub_jwk,
            'priv': priv_jwk
        }

    # Set router root
    app.router.set_root(root)

    for utility in getAllUtilitiesRegisteredFor(IAsyncUtility):
        # In case there is Utilties that are registered from zcml
        ident = asyncio.ensure_future(utility.initialize(app=app), loop=app.loop)
        root.add_async_utility(ident, {})

    app.on_cleanup.append(close_utilities)

    for util in app_settings['utilities']:
        root.add_async_utility(util)

    # Load cached Schemas
    load_cached_schema()

    return app
Example #8
0
def update_app_settings(settings):
    """Apply *settings* on top of the global ``app_settings``.

    A key whose current global value is a dict gets a shallow in-place
    merge; any other key is simply replaced.
    """
    for key, value in settings.items():
        current = app_settings.get(key)
        if not isinstance(current, dict):
            app_settings[key] = value
        else:
            current.update(value)