Example #1
 def setUp(self):
     user = User.objects.create_user('foo', password='******')
     user.is_staff = True
     user.is_active = True
     user.save()
     self.client = Client()
     self.client.login(username=user.username, password='******')
     get_queue('django_rq_test').connection.flushall()
Example #2
    def test_async(self):
        """
        Checks whether asynchronous settings work
        """
        # async defaults to True when it is not configured in settings
        defaultQueue = get_queue('default')
        self.assertTrue(defaultQueue._async)

        # Make sure async override works
        defaultQueueAsync = get_queue('default', async=False)
        self.assertFalse(defaultQueueAsync._async)

        # Make sure async setting works
        asyncQueue = get_queue('async')
        self.assertFalse(asyncQueue._async)
Example #3
def job(func_or_queue=None, connection=None, *args, **kwargs):
    """
    The same as RQ's job decorator, but it automatically works out
    the ``connection`` argument from RQ_QUEUES.

    It also allows the simplified ``@job`` syntax to put a job into the
    default queue.
    """
    if callable(func_or_queue):
        func = func_or_queue
        queue = 'default'
    else:
        func = None
        queue = func_or_queue or 'default'

    if not isinstance(queue, basestring):
        queue = unicode(queue)

    try:
        queue = get_queue(queue)
        if connection is None:
            connection = queue.connection
    except KeyError:
        pass

    decorator = _job(queue, connection=connection, *args, **kwargs)
    if func:
        return decorator(func)
    return decorator
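A minimal usage sketch for this decorator, assuming it is exposed as ``django_rq.job``; the task functions and the 'high' queue name below are illustrative, not taken from the source:

from django_rq import job

@job  # bare form: the wrapped function is queued on the 'default' queue
def count_words(text):
    return len(text.split())

@job('high')  # explicit form: the queue name must be present in RQ_QUEUES
def send_report(report_id):
    pass

# .delay() enqueues the call via RQ instead of running it inline
count_words.delay("hello world")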
Example #4
    def test_action_requeue_jobs(self):
        def failing_job():
            raise ValueError

        queue = get_queue('django_rq_test')
        failed_queue_index = get_failed_queue_index('django_rq_test')

        # enqueue some jobs that will fail
        jobs = []
        job_ids = []
        for _ in range(0, 3):
            job = queue.enqueue(failing_job)
            jobs.append(job)
            job_ids.append(job.id)

        # run the jobs so that they fail
        worker = get_worker('django_rq_test')
        worker.work(burst=True)

        # check if all jobs are really failed
        for job in jobs:
            self.assertTrue(job.is_failed)

        # requeue the failed jobs from the failed queue
        self.client.post(reverse('rq_actions', args=[failed_queue_index]),
                         {'action': 'requeue', 'job_ids': job_ids})

        # check that all failed jobs were requeued
        for job in jobs:
            self.assertFalse(job.is_failed)
Example #5
    def job(func_or_queue, connection=None, *args, **kwargs):
        """
        The same as RQ's job decorator, but it automatically works out
        the ``connection`` argument from RQ_QUEUES.

        It also allows the simplified ``@job`` syntax to put a job into the
        default queue.
        """
        if callable(func_or_queue):
            func = func_or_queue
            queue = 'default'
        else:
            func = None
            queue = func_or_queue

        try:
            from django.utils import six
            string_type = six.string_types
        except ImportError:
            # for django lt v1.5 and python 2
            string_type = basestring

        if isinstance(queue, string_type):
            try:
                queue = get_queue(queue)
                if connection is None:
                    connection = queue.connection
            except KeyError:
                pass

        decorator = _rq_job(queue, connection=connection, *args, **kwargs)
        if func:
            return decorator(func)
        return decorator
Example #6
def check_redis(app_configs=None, **kwargs):
    from django_rq.queues import get_queue
    from django_rq.workers import Worker

    errors = []

    try:
        queue = get_queue()
        workers = Worker.all(queue.connection)
    except Exception as e:
        conn_settings = queue.connection.connection_pool.connection_kwargs
        errors.append(checks.Critical(_("Could not connect to Redis (%s)") % (e),
            hint=_("Make sure Redis is running on %(host)s:%(port)s") % (conn_settings),
            id="pootle.C001",
        ))
    else:
        if not workers or workers[0].stopped:
            # We need to check we're not running manage.py rqworker right now..
            import sys
            if len(sys.argv) > 1 and sys.argv[1] in RQWORKER_WHITELIST:
                errors.append(checks.Warning(
                    _("No RQ Worker running."),
                    hint=_("Run new workers with manage.py rqworker"),
                    id="pootle.W001",
                ))

        redis_version = queue.connection.info()["redis_version"].split(".")
        if tuple(int(x) for x in redis_version) < REDIS_MINIMUM_REQUIRED_VERSION:
            errors.append(checks.Warning(
                _("Your version of Redis is too old."),
                hint=_("Update your system's Redis server package"),
                id="pootle.W002",
            ))

    return errors
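Such a function is typically wired into Django's system-check framework so that ``manage.py check`` reports the Redis problems; a minimal sketch, assuming ``check_redis`` is importable from the module above:

from django.core.checks import register

# register the check; "redis" is an assumed tag name for running it selectively
register(check_redis, "redis")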
Example #7
 def test_clear(self):
     queue = get_queue()
     delayed_queue = thread_queue.get_queue()
     delayed_queue.append((queue, divide, (1,), {'b': 1}))
     thread_queue.clear()
     delayed_queue = thread_queue.get_queue()
     self.assertEqual(delayed_queue, [])
Example #8
 def test_success(self):
     queue = get_queue()
     queue.empty()
     thread_queue.clear()
     self.assertEqual(queue.count, 0)
     self.client.get(reverse('success'))
     self.assertEqual(queue.count, 1)
Example #9
 def test_error(self):
     queue = get_queue()
     queue.empty()
     self.assertEqual(queue.count, 0)
     url = reverse('error')
     self.assertRaises(ValueError, self.client.get, url)
     self.assertEqual(queue.count, 0)
Example #10
def rq_stats():
    queue = get_queue()
    failed_queue = get_failed_queue()
    try:
        workers = Worker.all(queue.connection)
    except ConnectionError:
        return None

    num_workers = len(workers)
    is_running = len(queue.connection.smembers(Worker.redis_workers_keys)) > 0
    if is_running:
        # Translators: this refers to the status of the background job worker
        status_msg = ungettext('Running (%d worker)', 'Running (%d workers)',
                               num_workers) % num_workers
    else:
        # Translators: this refers to the status of the background job worker
        status_msg = _('Stopped')

    result = {
        'job_count': queue.count,
        'failed_job_count': failed_queue.count,
        'is_running': is_running,
        'status_msg': status_msg,
    }

    return result
Example #11
 def test_get_queue_django_redis(self, mocked):
     """
     Test that the USE_REDIS_CACHE option for configuration works.
     """
     queue = get_queue('django-redis')
     queue.enqueue(access_self)
     self.assertEqual(len(queue), 1)
     self.assertEqual(mocked.call_count, 1)
Example #12
    def test_to_localtime(self):
        with self.settings(TIME_ZONE='Asia/Jakarta'):
            queue = get_queue()
            job = queue.enqueue(access_self)
            time = to_localtime(job.created_at)

            self.assertIsNotNone(time.tzinfo)
            self.assertEqual(time.strftime("%z"), '+0700')
Example #13
 def test_clear_queue(self):
     """Test that the queue clear actually clears the queue."""
     queue = get_queue("django_rq_test")
     queue_index = get_queue_index("django_rq_test")
     job = queue.enqueue(access_self)
     self.client.post(reverse("rq_clear", args=[queue_index]), {"post": "yes"})
     self.assertFalse(Job.exists(job.id, connection=queue.connection))
     self.assertNotIn(job.id, queue.job_ids)
Example #14
def enqueue(func, *args, **kwargs):
    """
    A convenience function to put a job in the default queue. Usage::

        from django_rq import enqueue
        enqueue(func, *args, **kwargs)
    """
    return get_queue().enqueue(func, *args, **kwargs)
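A concrete call might look like the following; ``send_welcome_email`` is an illustrative placeholder, not a function from the source:

from django_rq import enqueue

def send_welcome_email(user_id):
    # placeholder task body
    pass

# puts the call on the 'default' queue and returns the rq Job instance
job = enqueue(send_welcome_email, 42)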
Example #15
def create_update_cache_job_wrapper(instance, keys, decrement=1):
    queue = get_queue('default')
    if queue._async:

        def _create_update_cache_job():
            create_update_cache_job(queue, instance, keys, decrement=decrement)
        connection.on_commit(_create_update_cache_job)
    else:
        instance._update_cache_job(keys, decrement=decrement)
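Note: ``connection`` in the snippet above is assumed to be Django's database connection (``from django.db import connection``), so the cache-update job is only created once the surrounding transaction commits.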
Example #16
    def test_autocommit(self):
        """
        Checks whether autocommit is set properly.
        """
        queue = get_queue(autocommit=True)
        self.assertTrue(queue._autocommit)
        queue = get_queue(autocommit=False)
        self.assertFalse(queue._autocommit)
        # Falls back to default AUTOCOMMIT mode
        queue = get_queue()
        self.assertFalse(queue._autocommit)

        queues = get_queues(autocommit=True)
        self.assertTrue(queues[0]._autocommit)
        queues = get_queues(autocommit=False)
        self.assertFalse(queues[0]._autocommit)
        queues = get_queues()
        self.assertFalse(queues[0]._autocommit)
Example #17
 def test_enqueue_autocommit_on(self):
     """
     Running ``enqueue`` when AUTOCOMMIT is on should
     immediately persist job into Redis.
     """
     queue = get_queue()
     job = queue.enqueue(divide, 1, 1)
     self.assertTrue(job.id in queue.job_ids)
     job.delete()
Example #18
def rq_workers_are_running():
    """Checks if there are any rq workers running

    :returns: `True` if there are rq workers running, `False` otherwise.
    """
    if redis_is_running():
        queue = get_queue()
        if len(queue.connection.smembers(Worker.redis_workers_keys)):
            return True
    return False
Example #19
 def test_get_current_job(self):
     """
     Ensure that functions using RQ's ``get_current_job`` don't fail
     when run from rqworker (the job id is not in the failed queue).
     """
     queue = get_queue()
     job = queue.enqueue(access_self)
     call_command('rqworker', burst=True)
     failed_queue = Queue(name='failed', connection=queue.connection)
     self.assertFalse(job.id in failed_queue.job_ids)
Example #20
 def test_job_decorator(self):
     # Ensure that decorator passes in the right queue from settings.py
     queue_name = 'test3'
     config = QUEUES[queue_name]
     @job(queue_name)
     def test():
         pass
     result = test.delay()
     queue = get_queue(queue_name)
     self.assertEqual(result.origin, queue_name)
Example #21
def commit(*args, **kwargs):
    """
    Processes all jobs in the delayed queue.
    """
    delayed_queue = get_queue()
    try:
        while delayed_queue:
            queue, args, kwargs = delayed_queue.pop(0)
            queue.original_enqueue_call(*args, **kwargs)
    finally:
        clear()
Example #22
 def test_delete_job(self):
     """
     In addition to deleting the job from Redis, the job id also needs to
     be deleted from the queue.
     """
     queue = get_queue('django_rq_test')
     job = queue.enqueue(access_self)
     self.client.post(reverse('rq_delete_job', args=[queue.connection_name, queue.name, job.id]),
                      {'post': 'yes'})
     self.assertFalse(Job.exists(job.id, connection=queue.connection))
     self.assertNotIn(job.id, queue.job_ids)
Example #23
 def test_delete_job(self):
     """
     In addition to deleting the job from Redis, the job id also needs to
     be deleted from the queue.
     """
     queue = get_queue("django_rq_test")
     queue_index = get_queue_index("django_rq_test")
     job = queue.enqueue(access_self)
     self.client.post(reverse("rq_delete_job", args=[queue_index, job.id]), {"post": "yes"})
     self.assertFalse(Job.exists(job.id, connection=queue.connection))
     self.assertNotIn(job.id, queue.job_ids)
Example #24
def redis_is_running():
    """Checks is redis is running

    :returns: `True` if redis is running, `False` otherwise.
    """
    try:
        queue = get_queue()
        Worker.all(queue.connection)
    except ConnectionError:
        return False
    return True
Example #25
 def test_get_queue_test(self):
     """
     Test that get_queue use the right parameters for `test`
     connection.
     """
     config = QUEUES["test"]
     queue = get_queue("test")
     connection_kwargs = queue.connection.connection_pool.connection_kwargs
     self.assertEqual(queue.name, "test")
     self.assertEqual(connection_kwargs["host"], config["HOST"])
     self.assertEqual(connection_kwargs["port"], config["PORT"])
     self.assertEqual(connection_kwargs["db"], config["DB"])
Example #26
 def test_get_queue_test(self):
     """
     Test that get_queue uses the right parameters for `test`
     connection.
     """
     config = QUEUES['test']
     queue = get_queue('test')
     connection_kwargs = queue.connection.connection_pool.connection_kwargs
     self.assertEqual(queue.name, 'test')
     self.assertEqual(connection_kwargs['host'], config['HOST'])
     self.assertEqual(connection_kwargs['port'], config['PORT'])
     self.assertEqual(connection_kwargs['db'], config['DB'])
Example #27
    def test_worker_details(self):
        """Worker index page should show workers for a specific queue"""
        queue = get_queue('django_rq_test')
        queue_index = get_queue_index('django_rq_test')

        worker = get_worker('django_rq_test', name=uuid.uuid4().hex)
        worker.register_birth()

        response = self.client.get(
            reverse('rq_worker_details', args=[queue_index, worker.key])
        )
        self.assertEqual(response.context['worker'], worker)
Example #28
    def test_deferred_jobs(self):
        """Ensure that active jobs page works properly."""
        queue = get_queue('django_rq_test')
        queue_index = get_queue_index('django_rq_test')

        job = queue.enqueue(access_self)
        registry = DeferredJobRegistry(queue.name, queue.connection)
        registry.add(job, 2)
        response = self.client.get(
            reverse('rq_deferred_jobs', args=[queue_index])
        )
        self.assertEqual(response.context['jobs'], [job])
Example #29
def get_queues(*queue_names, **kwargs):
    """
    Return queue instances from specified queue names.
    All instances must use the same Redis connection.
    """
    from .settings import QUEUES

    autocommit = kwargs.get("autocommit", None)
    if len(queue_names) == 0:
        # Return "default" queue if no queue name is specified
        return [get_queue(autocommit=autocommit)]
    if len(queue_names) > 1:
        connection_params = QUEUES[queue_names[0]]
        for name in queue_names:
            if QUEUES[name] != connection_params:
                raise ValueError(
                    "Queues must have the same redis connection."
                    '"{0}" and "{1}" have '
                    "different connections".format(name, queue_names[0])
                )
    return [get_queue(name, autocommit=autocommit) for name in queue_names]
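A short sketch of how the helper behaves, assuming 'default' and 'high' both appear in RQ_QUEUES and share the same Redis connection settings:

# no names: a one-element list containing the 'default' queue
queues = get_queues()

# several names are allowed only when they use the same Redis connection;
# otherwise the ValueError above is raised
queues = get_queues('default', 'high')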
Example #30
 def test_get_connection_url(self):
     """
     Test that get_queue uses the right parameters for queues using URL for
     connection.
     """
     queue = get_queue('default', connection_name='url')
     connection_kwargs = queue.connection.connection_pool.connection_kwargs
     self.assertEqual(queue.name, 'default')
     self.assertEqual(connection_kwargs['host'], 'host')
     self.assertEqual(connection_kwargs['port'], 1234)
     self.assertEqual(connection_kwargs['db'], 4)
     self.assertEqual(connection_kwargs['password'], 'password')
Example #31
 def test_for_queue(self):
     queue = get_queue('test1')
     self.assertIsInstance(queue, DummyQueue)
Example #32
def test_rq_workers_running():
    from django_rq.queues import get_queue
    from django_rq.workers import Worker
    queue = get_queue()
    workers = Worker.all(queue.connection)
    return len(workers) >= 1 and not workers[0].stopped, len(workers)
Example #33
 def tearDown(self):
     get_queue().empty()
Example #34
 def test_default_timeout(self):
     """Ensure DEFAULT_TIMEOUT are properly parsed."""
     queue = get_queue()
     self.assertEqual(queue._default_timeout, 500)
     queue = get_queue('test1')
     self.assertEqual(queue._default_timeout, 400)
Example #35
def upload_view(request):
    """
    :param request: Django Request
    :return: Http Response
    """
    if preferences.Setting.active_release is None:
        all_msgs = messages.get_messages(request)
        if len(all_msgs) == 0:
            messages.error(request,
                           mark_safe(
                               _("Active release not set: you cannot publish your "
                                 "repository without an active release. <a href=\"%s\">Add Release</a>")
                               % reverse("admin:WEIPDCRM_release_add")
                           ))
    # POST
    if request.method == 'POST':
        # action: upload
        if 'action' in request.POST and request.POST['action'] == 'upload':
            if 'ajax' in request.POST and request.POST['ajax'] == 'true':
                result_dict = {}
                if 'job' in request.POST:
                    job_id = request.POST['job']
                    result_dict = {}
                    m_job = queues.get_queue('high').fetch_job(job_id)
                    if m_job is None:
                        result_dict.update({
                            'result': False,
                            'msg': _('No such job'),
                            'job': None
                        })
                    else:
                        result_dict.update({
                            'result': True,
                            'msg': '',
                            'job': {
                                'id': m_job.id,
                                'is_failed': m_job.is_failed,
                                'is_finished': m_job.is_finished,
                                'result': m_job.result
                            }
                        })
                else:
                    form = UploadForm(request.POST, request.FILES)
                    if form.is_valid():
                        # Handle File
                        if settings.ENABLE_REDIS is True:
                            m_job = handle_uploaded_file(request)
                            result_dict.update({
                                'status': True,
                                'msg': _('Upload succeed, proceeding...'),
                                'job': {
                                    'id': m_job.id,
                                    'result': m_job.result
                                }
                            })
                        else:
                            m_result = handle_uploaded_file(request)
                            succeed = m_result['success']
                            if succeed:
                                result_dict.update({
                                    'status': True,
                                    'msg': _('Upload succeed, proceeding...'),
                                    'job': {
                                        'id': None,
                                        'result': {
                                            'version': m_result['version']
                                        }
                                    }
                                })
                            else:
                                result_dict.update({
                                    'status': False,
                                    'msg': m_result['exception'],
                                    'job': None
                                })
                    else:
                        result_dict.update({
                            'status': False,
                            'msg': _('Upload failed, invalid form.'),
                            'job': None
                        })
                return HttpResponse(json.dumps(result_dict), content_type='application/json')
            else:
                # render upload result
                form = UploadForm(request.POST, request.FILES)
                if form.is_valid():
                    # Handle File
                    if settings.ENABLE_REDIS is True:
                        m_job = handle_uploaded_file(request)
                        job_id = m_job.id
                        msg = _('Upload succeed, proceeding...')
                    else:
                        m_result = handle_uploaded_file(request)
                        if m_result["success"] is True:
                            return redirect(Version.objects.get(id=int(m_result["version"])).get_admin_url())
                        else:
                            job_id = ''
                            msg = m_result["exception"]
                else:
                    job_id = ''
                    msg = _('Upload failed, invalid form.')
                form = UploadForm()
                context = admin.site.each_context(request)
                context.update({
                    'title': _('Upload New Packages'),
                    'form': form,
                    'job_id': job_id,
                    'msg': msg
                })
                template = 'admin/upload.html'
                return render(request, template, context)
        # action: async-import
        elif 'action' in request.POST and request.POST['action'] == 'async-import':
            if not settings.ENABLE_REDIS:
                messages.error(request, mark_safe(
                    _("To use this action, you must enable <b>Redis Queue</b>.")
                ))
            else:
                items = os.listdir(settings.UPLOAD_ROOT)
                import_items = []
                for item in items:
                    if item[-4:] == ".deb":
                        item_path = os.path.join(settings.UPLOAD_ROOT, item)
                        import_items.append(item_path)
                if len(import_items) > 0:
                    temp_root = settings.TEMP_ROOT
                    if not os.path.exists(temp_root):
                        try:
                            mkdir_p(temp_root)
                        except OSError:
                            pass
                    import_jobs = []
                    queue = django_rq.get_queue('high')
                    for import_item in import_items:
                        package_temp_path = os.path.join(temp_root, str(uuid.uuid1()) + '.deb')
                        shutil.copy(import_item, package_temp_path)
                        os.chmod(package_temp_path, 0o755)
                        import_job = queue.enqueue(handle_uploaded_package, package_temp_path)
                        import_jobs.append(import_job)
                    if len(import_jobs) == 1:
                        messages.info(request, mark_safe(_("{job_count} package importing job has been added to the \"<a href=\"{jobs}\">high</a>\" queue.").format(
                            job_count=str(len(import_jobs)),
                            jobs=reverse('rq_jobs', args=(1, )),
                        )))
                    else:
                        messages.info(request, mark_safe(_("{job_count} package importing jobs have been added to the \"<a href=\"{jobs}\">high</a>\" queue.").format(
                            job_count=str(len(import_jobs)),
                            jobs=reverse('rq_jobs', args=(1, )),
                        )))
                else:
                    messages.warning(request, _("There is no package to import."))
            return redirect('upload')
    # GET
    elif request.method == 'GET':
        form = UploadForm()
        context = admin.site.each_context(request)
        context.update({
            'title': _('Upload New Packages'),
            'form': form,
            'job_id': ''
        })
        template = 'admin/upload.html'
        return render(request, template, context)
Example #36
 def test_default_queue_class(self):
     queue = get_queue('test')
     self.assertIsInstance(queue, DjangoRQ)
Example #37
 def _enqueue_job():
     queue = get_queue('default')
     queue.enqueue(update_translation_project, tp,
                   initialize_from_templates, response_url)
Example #38
def upload_screenshots_view(request, package_id):
    """
    :param request: Django Request
    :return: Http Response
    """
    if request.method == "POST":
        # Save Images To Resource Base
        if 'ajax' in request.POST and request.POST['ajax'] == 'true':
            result_dict = {}
            if 'job' in request.POST:
                job_id = request.POST['job']
                result_dict = {}
                m_job = queues.get_queue('high').fetch_job(job_id)
                if m_job is None:
                    result_dict.update({
                        'result': False,
                        'msg': _('No such job'),
                        'job': None
                    })
                else:
                    result_dict.update({
                        'result': True,
                        'msg': '',
                        'job': {
                            'id': m_job.id,
                            'is_failed': m_job.is_failed,
                            'is_finished': m_job.is_finished,
                            'result': m_job.result
                        }
                    })
            else:
                form = ImageForm(request.POST, request.FILES)
                if form.is_valid():
                    # Handle File

                    if settings.ENABLE_REDIS is True:
                        m_job = handle_uploaded_image(request, package_id)
                        result_dict.update({
                            'status':
                            True,
                            'msg':
                            _('Upload succeed, proceeding...'),
                            'job': {
                                'id': m_job.id,
                                'result': m_job.result
                            }
                        })
                    else:
                        m_result = handle_uploaded_image(request, package_id)
                        succeed = m_result['success']
                        if succeed:
                            result_dict.update({
                                'status':
                                True,
                                'msg':
                                _('Upload succeed, proceeding...'),
                                'job': {
                                    'id': None,
                                    'result': {
                                        'version': m_result['version']
                                    }
                                }
                            })
                        else:
                            result_dict.update({
                                'status': False,
                                'msg': m_result['exception'],
                                'job': None
                            })
                else:
                    result_dict.update({
                        'status': False,
                        'msg': _('Upload failed, invalid form.'),
                        'job': None
                    })
            return HttpResponse(json.dumps(result_dict),
                                content_type='application/json')
        else:
            # render upload result
            form = UploadForm(request.POST, request.FILES)
            if form.is_valid():
                # Handle File
                if settings.ENABLE_REDIS is True:
                    m_job = handle_uploaded_image(request, package_id)
                    job_id = m_job.id
                    msg = _('Upload succeed, proceeding...')
                else:
                    m_result = handle_uploaded_image(request, package_id)
                    if m_result["success"] is True:
                        return redirect(
                            Version.objects.get(
                                id=int(m_result["version"])).get_admin_url())
                    else:
                        job_id = ''
                        msg = m_result["exception"]
            else:
                job_id = ''
                msg = _('Upload failed, invalid form.')
            form = ImageForm()
            context = admin.site.each_context(request)
            context.update({
                'title': _('Upload Screenshots'),
                'form': form,
                'job_id': job_id,
                'msg': msg
            })
            template = 'admin/upload_image.html'
            return render(request, template, context)
    else:
        version = Version.objects.get(id=int(package_id))
        name = version.c_name + " " + version.c_version
        form = ImageForm()
        context = admin.site.each_context(request)
        context.update({
            'title': _('Upload Screenshots'),
            'form': form,
            'drop_title': name,
            'job_id': ''
        })
        template = 'admin/upload_image.html'

        return render(request, template, context)
Example #39
 def test_in_kwargs(self):
     queue = get_queue('test', queue_class=DummyQueue)
     self.assertIsInstance(queue, DummyQueue)
Example #40
def upload_view(request):
    """
    :param request: Django Request
    :return: Http Response
    """
    if preferences.Setting.active_release is None:
        messages.error(
            request,
            _("Active release not set: you cannot publish your "
              "repository without an active release."))
    if request.method == 'POST':
        # Save Package File To Resource Base
        if 'ajax' in request.POST and request.POST['ajax'] == 'true':
            result_dict = {}
            if 'job' in request.POST:
                job_id = request.POST['job']
                result_dict = {}
                m_job = queues.get_queue('high').fetch_job(job_id)
                if m_job is None:
                    result_dict.update({
                        'result': False,
                        'msg': _('No such job'),
                        'job': None
                    })
                else:
                    result_dict.update({
                        'result': True,
                        'msg': '',
                        'job': {
                            'id': m_job.id,
                            'is_failed': m_job.is_failed,
                            'is_finished': m_job.is_finished,
                            'result': m_job.result
                        }
                    })
            else:
                form = UploadForm(request.POST, request.FILES)
                if form.is_valid():
                    # Handle File
                    if settings.ENABLE_REDIS is True:
                        m_job = handle_uploaded_file(request)
                        result_dict.update({
                            'status':
                            True,
                            'msg':
                            _('Upload succeed, proceeding...'),
                            'job': {
                                'id': m_job.id,
                                'result': m_job.result
                            }
                        })
                    else:
                        m_result = handle_uploaded_file(request)
                        succeed = m_result['success']
                        if succeed:
                            result_dict.update({
                                'status':
                                True,
                                'msg':
                                _('Upload succeed, proceeding...'),
                                'job': {
                                    'id': None,
                                    'result': {
                                        'version': m_result['version']
                                    }
                                }
                            })
                        else:
                            result_dict.update({
                                'status': False,
                                'msg': m_result['exception'],
                                'job': None
                            })
                else:
                    result_dict.update({
                        'status': False,
                        'msg': _('Upload failed, invalid form.'),
                        'job': None
                    })
            return HttpResponse(json.dumps(result_dict),
                                content_type='application/json')
        else:
            # render upload result
            form = UploadForm(request.POST, request.FILES)
            if form.is_valid():
                # Handle File
                if settings.ENABLE_REDIS is True:
                    m_job = handle_uploaded_file(request)
                    job_id = m_job.id
                    msg = _('Upload succeed, proceeding...')
                else:
                    m_result = handle_uploaded_file(request)
                    if m_result["success"] is True:
                        return redirect(
                            Version.objects.get(
                                id=int(m_result["version"])).get_admin_url())
                    else:
                        job_id = ''
                        msg = m_result["exception"]
            else:
                job_id = ''
                msg = _('Upload failed, invalid form.')
            form = UploadForm()
            context = admin.site.each_context(request)
            context.update({
                'title': _('Upload New Packages'),
                'form': form,
                'job_id': job_id,
                'msg': msg
            })
            template = 'admin/upload.html'
            return render(request, template, context)
    else:
        form = UploadForm()
        context = admin.site.each_context(request)
        context.update({
            'title': _('Upload New Packages'),
            'form': form,
            'job_id': ''
        })
        template = 'admin/upload.html'
        return render(request, template, context)
Example #41
    def get_context_data(self, *args, **kwargs):
        can_translate = False
        can_translate_stats = False
        User = get_user_model()

        if self.request.user.is_superuser or self.language:
            can_translate = True
            can_translate_stats = True
        elif self.project:
            can_translate = True

        ctx = super().get_context_data(*args, **kwargs)

        lang_code, proj_code = split_pootle_path(self.pootle_path)[:2]
        top_scorers = User.top_scorers(
            project=proj_code,
            language=lang_code,
            limit=TOP_CONTRIBUTORS_CHUNK_SIZE + 1,
        )
        top_scorers = get_top_scorers_data(top_scorers, TOP_CONTRIBUTORS_CHUNK_SIZE)

        can_admin_due_dates = bool(self.project and self.has_admin_access)
        due_date = None
        if can_admin_due_dates:
            try:
                due_date_obj = DueDate.objects.get(pootle_path=self.pootle_path,)
                due_date = {
                    "id": due_date_obj.id,
                    "due_on": due_date_obj.due_on,
                    "pootle_path": due_date_obj.pootle_path,
                }
            except DueDate.DoesNotExist:
                due_date = {
                    "id": 0,
                    "due_on": 0,
                    "pootle_path": self.pootle_path,
                }

        pending_tasks = None
        PENDING_TASKS_LIMIT = 3
        if lang_code and self.request.user.is_authenticated:
            tasks = DueDate.tasks(lang_code, user=self.request.user)
            pending_tasks = {
                "total": tasks.total,
                "items": tasks[:PENDING_TASKS_LIMIT],
            }

        pending_jobs = 0
        if self.request.user.is_superuser:
            from django_rq.queues import get_queue

            queue = get_queue()
            pending_jobs = queue.count

        has_disabled_items = self.request.user.is_superuser and any(
            (
                getattr(item, "disabled", False)
                or not self.stats["total"]
                or self.stats["total"] < 1
            )
            for item in self.object.children
        )

        ctx.update(
            {
                "page": "browse",
                "stats_pending_jobs": pending_jobs,
                "stats_refresh_attempts_count": STATS_REFRESH_ATTEMPTS_COUNT,
                "browsing_data": self.get_browsing_data(),
                "can_translate": can_translate,
                "can_translate_stats": can_translate_stats,
                "has_disabled_items": has_disabled_items,
                "top_scorers": remove_empty_from_dict(top_scorers),
                "browser_extends": self.template_extends,
                "can_admin_due_dates": can_admin_due_dates,
                "due_date": due_date,
                "pending_tasks": pending_tasks,
            }
        )

        return ctx
Example #42
def create_update_cache_job(instance, keys, decrement=1):
    queue = get_queue('default')
    queue.connection.sadd(queue.redis_queues_keys, queue.key)
    job_wrapper = JobWrapper.create(update_cache_job,
                                    instance=instance,
                                    keys=keys,
                                    decrement=decrement,
                                    connection=queue.connection,
                                    origin=queue.name,
                                    timeout=queue.DEFAULT_TIMEOUT)
    last_job_key = instance.get_last_job_key()

    with queue.connection.pipeline() as pipe:
        while True:
            try:
                pipe.watch(last_job_key)
                last_job_id = queue.connection.get(last_job_key)
                depends_on_wrapper = None
                if last_job_id is not None:
                    pipe.watch(Job.key_for(last_job_id),
                               JobWrapper.params_key_for(last_job_id))
                    depends_on_wrapper = JobWrapper(last_job_id,
                                                    queue.connection)

                pipe.multi()

                depends_on_status = None
                if depends_on_wrapper is not None:
                    depends_on = depends_on_wrapper.job
                    depends_on_status = depends_on.get_status()

                if depends_on_status is None:
                    # enqueue without dependencies
                    pipe.set(last_job_key, job_wrapper.id)
                    job_wrapper.save_enqueued(pipe)
                    pipe.execute()
                    break

                if depends_on_status in [JobStatus.QUEUED, JobStatus.DEFERRED]:
                    new_job_params = \
                        depends_on_wrapper.merge_job_params(keys, decrement,
                                                            pipeline=pipe)
                    pipe.execute()
                    msg = 'SKIP %s (decrement=%s, job_status=%s, job_id=%s)'
                    msg = msg % (last_job_key, new_job_params[1],
                                 depends_on_status, last_job_id)
                    logger.debug(msg)
                    # skip this job
                    return None

                pipe.set(last_job_key, job_wrapper.id)

                if depends_on_status not in [JobStatus.FINISHED]:
                    # add job as a dependent
                    job = job_wrapper.save_deferred(last_job_id, pipe)
                    pipe.execute()
                    logger.debug('ADD AS DEPENDENT for %s (job_id=%s) OF %s' %
                                 (last_job_key, job.id, last_job_id))
                    return job

                job_wrapper.save_enqueued(pipe)
                pipe.execute()
                break
            except WatchError:
                logger.debug('RETRY after WatchError for %s' % last_job_key)
                continue
    logger.debug('ENQUEUE %s (job_id=%s)' % (last_job_key, job_wrapper.id))
    queue.push_job_id(job_wrapper.id)
Example #43
 def test_get_queues_same_connection(self):
     """
     Checks that getting queues with the same redis connection is ok.
     """
     self.assertEqual(get_queues('test', 'test2'), [get_queue('test'), get_queue('test2')])
Example #44
 def handle(self, **options):
     queue = get_queue()
     failed_job_registry = FailedJobRegistry(queue.name, queue.connection)
     for job_id in failed_job_registry.get_job_ids():
         failed_job_registry.requeue(job_id)