Example No. 1
def wait_for_es(self):
    # Only re-index when Elasticsearch is configured for this app.
    if self.app.config.get('ELASTICSEARCH_URL'):
        # Brief pauses before and after the bulk re-import give
        # Elasticsearch time to catch up before the test continues.
        time.sleep(2)
        helpers.full_user_import()
        helpers.full_channel_import()
        helpers.full_video_import()
        time.sleep(2)
Example No. 2
def import_to_es(prefix=None, **kwargs):
    """Import data into elasticsearch from the db"""
    # NOTE: change this to be sensible
    from rockpack.mainsite.core.es import helpers
    i = helpers.DBImport()

    if kwargs['terms_for_channel_only']:
        i.import_video_channel_terms()
        return

    if kwargs['stars_only']:
        i.import_video_stars()
        return

    if kwargs['restrictions_only']:
        i.import_video_restrictions()
        return

    if kwargs['shares_only']:
        i.import_channel_share()
        return

    if kwargs['suggestions_only']:
        i.import_search_suggestions()
        return

    if not (kwargs['videos_only'] or kwargs['users_only']):
        helpers.full_channel_import()
    if not (kwargs['channels_only'] or kwargs['users_only']):
        helpers.full_video_import(prefix=prefix)
    if not (kwargs['channels_only'] or kwargs['videos_only']):
        helpers.full_user_import()
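
Note that import_to_es indexes into kwargs directly rather than using kwargs.get(), so every flag has to be supplied by the caller. A minimal sketch of a full invocation that falls through to the three full imports (the flag names come from the function body above; the call site itself is an assumption, not part of the original code):

import_to_es(
    prefix=None,
    terms_for_channel_only=False,
    stars_only=False,
    restrictions_only=False,
    shares_only=False,
    suggestions_only=False,
    videos_only=False,
    users_only=False,
    channels_only=False,
)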
Example No. 3
def pytest_configure(config):
    from rockpack.mainsite.core.es import mappings
    mappings.CHANNEL_ALIAS = mappings.CHANNEL_INDEX = 'test_channel'
    mappings.VIDEO_ALIAS = mappings.VIDEO_INDEX = 'test_video'
    mappings.USER_ALIAS = mappings.USER_INDEX = 'test_user'

    from rockpack.mainsite import app, init_app

    app.config['TESTING'] = True
    app.config['FORCE_INDEX_INSERT_REFRESH'] = True
    app.config['DATABASE_URL'] = app.config.get('TEST_DATABASE_URL', 'sqlite://')

    # import after setting DATABASE_URL
    from rockpack.mainsite.core import dbapi

    if app.config.get('ELASTICSEARCH_URL'):
        from rockpack.mainsite.core.es import helpers

        helpers.Indexing.create_all_indexes(rebuild=True)
        helpers.Indexing.create_all_mappings()

    if 'sqlite:' in app.config['DATABASE_URL']:
        connection = dbapi.db.engine.raw_connection().connection
        # Seems to be required for sub-transaction support:
        connection.isolation_level = None
        # Use group_concat instead of string_agg
        from sqlalchemy import func
        func.string_agg = func.group_concat
        # For compatibility with postgres. XXX: can't return timedelta :-(
        from datetime import datetime
        connection.create_function('age', 1, lambda d: None)
        # substitute postgres-specific "interval" expression
        from rockpack.mainsite.services.user import api
        from sqlalchemy import text
        api.SUBSCRIPTION_VIDEO_FEED_THRESHOLD = text("datetime('now')")
        api.ACTIVITY_LAST_ACTION_COMPARISON = "action = '%s'"

    dbapi.sync_database(drop_all=True)

    from wonder.common import timing
    timing.log.level = 50

    from test.test_helpers import install_mocks
    from test.fixtures import install, all_data
    install_mocks()
    init_app()
    # Explicitly load admin tables after the app is loaded.
    dbapi.sync_database(custom_modules=('rockpack.mainsite.admin', 'rockpack.mainsite.admin.auth', ))
    install(*all_data)

    if app.config.get('ELASTICSEARCH_URL'):
        helpers.full_user_import()
        helpers.full_channel_import()
        helpers.full_video_import()
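
The func.string_agg = func.group_concat assignment in the SQLite branch above works because SQLAlchemy's func namespace generates SQL functions from attribute names, so queries written against Postgres's string_agg() compile to SQLite's group_concat() after the override. A small standalone illustration (the column name is invented for the sketch):

from sqlalchemy import func
from sqlalchemy.sql import column

func.string_agg = func.group_concat
expr = func.string_agg(column('title'), ',')
print(expr)  # compiles to group_concat(title, ...) instead of string_agg(title, ...)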
Example No. 4
def update_indexes():
    """Updates all data in all indexes"""
    job_control = JobControl.query.get('update_indexes')
    start = job_control.last_run
    stop = start + timedelta(seconds=60)
    delay = datetime.utcnow() - stop
    if delay < timedelta(0):
        return

    app.logger.info('update_indexes: from %s to %s', start, stop)

    start_time = time.time()
    helpers.full_user_import(start=start, stop=stop)
    helpers.full_channel_import(start=start, stop=stop)
    helpers.full_video_import(start=start, stop=stop)

    app.logger.info('update_indexes: ran in %ds: delay %ds',
                    time.time() - start_time, delay.total_seconds())
    record_timing('cron_processor.update_indexes.delay', delay.total_seconds())

    job_control.last_run = stop
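
update_indexes processes a fixed 60-second window per run: it only does work once the window ending 60 seconds after last_run has fully elapsed, and it advances last_run by exactly one window, so any backlog is caught up one minute per invocation. A standalone sketch of that windowing rule (the helper name is invented for illustration):

from datetime import datetime, timedelta

def next_index_window(last_run, now=None, window=timedelta(seconds=60)):
    """Return the (start, stop) pair to index, or None if the window
    ending one minute after last_run has not fully elapsed yet."""
    now = now or datetime.utcnow()
    stop = last_run + window
    if now < stop:
        return None
    return last_run, stop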