Example #1
# crontab (from celery.schedules) supplies the beat schedules below;
# celery_inspect is Celery's inspect helper, imported as in Example #5.
from celery.schedules import crontab

def post_process_settings(settings):

    # check secret key
    assert 'SECRET_KEY' in settings and settings['SECRET_KEY'] is not None, "Set DJANGO__SECRET_KEY env var!"

    # Deal with custom setting for CELERY_DEFAULT_QUEUE.
    # Changing CELERY_DEFAULT_QUEUE only changes the queue name,
    # but we need it to change the exchange and routing_key as well.
    # See http://celery.readthedocs.org/en/latest/userguide/routing.html#changing-the-name-of-the-default-queue
    try:
        default_queue = settings['CELERY_DEFAULT_QUEUE']
        if default_queue != "celery":
            from kombu import Exchange, Queue
            settings['CELERY_QUEUES'] = (Queue(default_queue, Exchange(default_queue), routing_key=default_queue),)
    except KeyError:
        # no custom setting for CELERY_DEFAULT_QUEUE
        pass

    # add the named celerybeat jobs
    celerybeat_job_options = {
        # primary server
        'update-stats': {
            'task': 'perma.tasks.update_stats',
            'schedule': crontab(minute='*'),
        },
        'send-links-to-internet-archives': {
            'task': 'perma.tasks.upload_all_to_internet_archive',
            'schedule': crontab(minute='0', hour='*'),
        },
        'send-js-errors': {
            'task': 'perma.tasks.send_js_errors',
            'schedule': crontab(hour='10', minute='0', day_of_week=1)
        },
        'run-next-capture': {
            'task': 'perma.tasks.run_next_capture',
            'schedule': crontab(minute='*'),
        },
        'sync_subscriptions_from_perma_payments': {
            'task': 'perma.tasks.sync_subscriptions_from_perma_payments',
            'schedule': crontab(hour='23', minute='0')
        },
        'verify_webrecorder_api_available': {
            'task': 'perma.tasks.verify_webrecorder_api_available',
            'schedule': crontab(minute='*')
        }
    }
    # Keep only the jobs whitelisted in CELERYBEAT_JOB_NAMES, letting any
    # pre-existing CELERYBEAT_SCHEDULE entries override them.
    settings['CELERYBEAT_SCHEDULE'] = dict(((job, celerybeat_job_options[job]) for job in settings.get('CELERYBEAT_JOB_NAMES', [])),
                                           **settings.get('CELERYBEAT_SCHEDULE', {}))

    # Count celery capture workers, by convention named w1, w2, etc.
    # At the moment, this is slow, so we do it once on application
    # start-up rather than at each load of the /manage/create page.
    # The call to inspector.active() takes almost two seconds.
    try:
        inspector = celery_inspect()
        active = inspector.active()
        settings['WORKER_COUNT'] = len([key for key in active.keys() if key.split('@')[0][0] == 'w']) if active else 0
    except TimeoutError:
        # If the workers don't answer in time, leave WORKER_COUNT unset.
        pass
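This first example post-processes a Django settings dict: it checks the secret key, reroutes the default Celery queue, and builds CELERYBEAT_SCHEDULE from a whitelist of job names. A minimal sketch of driving it, with illustrative settings values not taken from the original project (the final WORKER_COUNT step needs a reachable broker and may fail without one):

settings = {
    'SECRET_KEY': 'not-really-secret',              # illustrative value
    'CELERY_DEFAULT_QUEUE': 'perma',                # anything but 'celery' triggers requeueing
    'CELERYBEAT_JOB_NAMES': ['update-stats', 'run-next-capture'],
}
post_process_settings(settings)
print(sorted(settings['CELERYBEAT_SCHEDULE']))      # ['run-next-capture', 'update-stats']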
Example #2
def list_tasks(task_id):
    # Print a snapshot of task state on the workers named 'celery' and 'lynx'.
    # (The task_id parameter is unused in this snippet.)
    stats = celery_inspect(['celery', 'lynx'])
    print("TASK active:", stats.active())
    print("TASK scheduled:", stats.scheduled())
    print("TASK reserved:", stats.reserved())
    print("TASK revoked:", stats.revoked())
    print("TASK registered:", stats.registered())
Example #3
from celery.schedules import crontab  # as in Example #1, for the beat schedules

def post_process_settings(settings):

    # check secret key
    assert 'SECRET_KEY' in settings and settings[
        'SECRET_KEY'] is not None, "Set DJANGO__SECRET_KEY env var!"

    # Deal with custom setting for CELERY_DEFAULT_QUEUE.
    # Changing CELERY_DEFAULT_QUEUE only changes the queue name,
    # but we need it to change the exchange and routing_key as well.
    # See http://celery.readthedocs.org/en/latest/userguide/routing.html#changing-the-name-of-the-default-queue
    try:
        default_queue = settings['CELERY_DEFAULT_QUEUE']
        if default_queue != "celery":
            from kombu import Exchange, Queue
            settings['CELERY_QUEUES'] = (Queue(default_queue,
                                               Exchange(default_queue),
                                               routing_key=default_queue), )
    except KeyError:
        # no custom setting for CELERY_DEFAULT_QUEUE
        pass

    # add the named celerybeat jobs
    celerybeat_job_options = {
        # primary server
        'update-stats': {
            'task': 'perma.tasks.update_stats',
            'schedule': crontab(minute='*'),
        },
        'send-links-to-internet-archives': {
            'task': 'perma.tasks.upload_all_to_internet_archive',
            'schedule': crontab(minute='0', hour='*'),
        },
        'send-js-errors': {
            'task': 'perma.tasks.send_js_errors',
            'schedule': crontab(hour='10', minute='0', day_of_week=1)
        },
        'run-next-capture': {
            'task': 'perma.tasks.run_next_capture',
            'schedule': crontab(minute='*'),
        }
    }
    settings['CELERYBEAT_SCHEDULE'] = dict(
        ((job, celerybeat_job_options[job])
         for job in settings.get('CELERYBEAT_JOB_NAMES', [])),
        **settings.get('CELERYBEAT_SCHEDULE', {}))

    # Count celery capture workers, by convention named w1, w2, etc.
    # At the moment, this is slow, so we do it once on application
    # start-up rather than at each load of the /manage/create page.
    # The call to inspector.active() takes almost two seconds.
    try:
        inspector = celery_inspect()
        active = inspector.active()
        settings['WORKER_COUNT'] = len(
            [key for key in active.keys()
             if key.split('@')[0][0] == 'w']) if active else 0
    except TimeoutError:
        pass
Example #4
def create_link(request):
    # Assumes the surrounding module's imports: the Link and CaptureJob models,
    # django.contrib.messages, django.utils.timezone, datetime.timedelta,
    # django.shortcuts.render, django.conf.settings, and celery_inspect.

    deleted = request.GET.get('deleted', '')
    if deleted:
        try:
            link = Link.objects.all_with_deleted().get(guid=deleted)
        except Link.DoesNotExist:
            link = None
        if link:
            messages.add_message(request, messages.INFO,
                                 'Deleted - ' + link.submitted_title)

    # count celery capture workers, by convention named w1, w2, etc.
    inspector = celery_inspect()
    active = inspector.active()
    workers = len([key for key in active.keys() if key.split('@')[0][0] == 'w']) if active else 0

    # approximate 'average' capture time during last 24 hrs
    # based on manage/stats
    capture_time_fields = CaptureJob.objects.filter(
        link__creation_timestamp__gt=timezone.now() - timedelta(days=1),
        link__creation_timestamp__lt=timezone.now(),
    ).values(
        'capture_start_time', 'link__creation_timestamp', 'capture_end_time',
    ).exclude(capture_start_time=None).exclude(capture_end_time=None)
    if capture_time_fields:
        ctf_len = len(capture_time_fields)
        capture_times = sorted(c['capture_end_time'] - c['capture_start_time']
                               for c in capture_time_fields)
        # Middle element of the sorted durations, i.e. the median.
        average = capture_times[int(ctf_len * .5)].total_seconds()
    else:
        average = 1

    return render(request, 'user_management/create-link.html', {
        'this_page': 'create_link',
        'links_remaining': request.user.get_links_remaining(),
        'suppress_reminder': 'true' if 'url' in request.GET else request.COOKIES.get('suppress_reminder'),
        'max_size': settings.MAX_ARCHIVE_FILE_SIZE / 1024 / 1024,
        'workers': workers,
        'average': average,
    })
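Note that the 'average' the view reports is really the median of the last day's capture durations, so a single stuck capture cannot skew it. A small self-contained illustration of the same indexing trick:

from datetime import timedelta

durations = sorted(timedelta(seconds=s) for s in (2, 3, 50))
median = durations[int(len(durations) * .5)].total_seconds()
print(median)  # 3.0 -- a plain mean would be ~18.3, dragged up by the 50 s outlier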
Example #5
def inspect():
    """ Inspect is annoyingly unreliable and has a default 1-second timeout. """
    # The celery import below appears to need to come after the celery app is
    # loaded, because the class is dynamic; processing_celery_app and
    # push_send_celery_app are module-level globals.
    from datetime import datetime, timedelta

    if processing_celery_app is None or push_send_celery_app is None:
        raise CeleryNotRunningException()

    from celery.task.control import inspect as celery_inspect
    now = datetime.now()
    fail_time = now + timedelta(seconds=20)

    # Retry until the deadline; a successful call returns immediately.
    while now < fail_time:
        try:
            return celery_inspect(timeout=0.1)
        except CeleryNotRunningException:
            now = datetime.now()
            continue

    raise CeleryNotRunningException()
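The deadline loop above reads wall-clock datetime.now(), which can jump if the system clock is adjusted. A sketch of the same retry pattern on a monotonic clock; the function name is illustrative, and CeleryNotRunningException is the app's own exception:

import time

def inspect_with_deadline(deadline_seconds=20, attempt_timeout=0.1):
    # Same pattern as above, but immune to wall-clock adjustments.
    deadline = time.monotonic() + deadline_seconds
    while time.monotonic() < deadline:
        try:
            return celery_inspect(timeout=attempt_timeout)
        except CeleryNotRunningException:
            continue
    raise CeleryNotRunningException()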
Example #6
def celery_queue_status(request):
    """
    A simple report of how many tasks are in the main and background celery queues,
    what tasks are being processed by which workers, and how many tasks each worker
    has completed.

    Given:
    >>> from main.tasks import demo_scheduled_task
    >>> _, client, admin_user = [getfixture(i) for i in ['celery_worker', 'client', 'admin_user']]
    >>> _ = demo_scheduled_task.apply_async()

    The page returns and correctly reports the task was completed.
    >>> check_response(client.get(reverse('celery_queue_status'), as_user=admin_user), content_includes=
    ...     'class="finished">main.tasks.demo_scheduled_task:'
    ... )
    """
    inspector = celery_inspect()
    active = inspector.active()
    reserved = inspector.reserved()
    stats = inspector.stats()

    queues = []
    if active is not None:
        # The keys of `active` are worker names (e.g. 'celery@hostname');
        # the template lists each worker's activity under the label 'queues'.
        for queue in sorted(active.keys()):
            try:
                queues.append({
                    'name': queue,
                    'active': active[queue],
                    'reserved': reserved[queue],
                    'stats': stats[queue],
                })
            except KeyError:
                pass

    r = redis.from_url(settings.CELERY_BROKER_URL)

    return render(request, 'manage/celery.html', {
        'queues': queues,
        'total_main_queue': r.llen('celery'),
        'total_background_queue': r.llen('background'),
    })
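With the Redis broker, each Celery queue is an ordinary Redis list, so the llen calls above count the messages still waiting to be consumed. A standalone sketch against a hypothetical local broker:

import redis

r = redis.from_url('redis://localhost:6379/0')    # hypothetical broker URL
print('main queue:', r.llen('celery'))            # messages waiting on the default queue
print('background queue:', r.llen('background'))  # messages waiting on the 'background' queue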
Example #7
def inspect(*args, **kwargs):
    # Wrap Celery's inspect so callers get a sane default timeout;
    # DEFAULT_INSPECT_TIMEOUT is a module-level constant (in seconds).
    if 'timeout' not in kwargs:
        kwargs['timeout'] = DEFAULT_INSPECT_TIMEOUT
    return celery_inspect(*args, **kwargs)
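Callers then get the default unless they ask for something else; for example:

insp = inspect()             # uses DEFAULT_INSPECT_TIMEOUT
insp = inspect(timeout=5.0)  # an explicit timeout overrides the default
print(insp.ping())           # e.g. {'celery@host': {'ok': 'pong'}}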