Example no. 1
    def send_message(self, content="", group=None, sent_by="", eta=None):
        """
        Send SMS to an individual.

        If the person is blocking us, we skip them.
        """
        if not content:
            # No content, skip sending a message
            logger.info("Message content empty, skip api call")
            return
        if self.is_blocking or self.never_contact:
            return
        elif eta is None:
            async_task("apostello.tasks.recipient_send_message_task", self.pk,
                       content, group, sent_by)
        else:
            try:
                groupObj = RecipientGroup.objects.get(name=group)
            except RecipientGroup.DoesNotExist as e:
                groupObj = None
            QueuedSms.objects.create(time_to_send=eta,
                                     content=content,
                                     sent_by=sent_by,
                                     recipient_group=groupObj,
                                     recipient=self)
Example no. 2
 def save(self, *args, **kwargs):
     """Override save method to invalidate cache."""
     super(SmsInbound, self).save(*args, **kwargs)
     # invalidate per person last sms cache
     cache.set("last_msg__{0}".format(self.sender_num), None, 0)
     # update number of matched responses caches
     async_task("apostello.tasks.populate_keyword_response_count")
Example no. 3
 def spawn_eligible(self):
     for fileset in self._enum_eligible_filesets():
         async_task('planb.tasks.conditional_run',
                    fileset.pk,
                    broker=get_broker(settings.Q_MAIN_QUEUE),
                    q_options={'hook': 'planb.core.tasks.finalize_run'})
         logger.info('[%s] Scheduled backup', fileset)
Example no. 4
    def handle(self, *args, **options):
        d = {}

        for username in ins_list:
            try:
                profile = instaloader.Profile.from_username(
                    instagram_v2.ins.context, username)
                d[profile.full_name] = (profile.userid,
                                        profile.profile_pic_url)
            except Exception as e:
                self.stderr.write(str(e))

        for k, v in d.items():
            m: Member = Member.objects.filter(
                english_name__icontains=k).first()
            if m:
                user_id, profile_pic_url = v
                m.instagram_id = str(user_id)

                if not m.avatar:
                    media = Media.objects.create(original_url=profile_pic_url)
                    m.avatar = media
                    m.save()
                    async_task(media.download_to_local)

                self.stdout.write(
                    f'{m.english_name} with instagram_id {m.instagram_id}')
Example no. 5
def system_modificator(request):
    """ function to modify many systems at once (helper function to call the real function) """

    # form was valid to post
    if request.method == "POST":
        request_post = request.POST
        request_user = request.user

        # call async function
        async_task(
            "dfirtrack_main.modificator.system_modificator.system_modificator_async",
            request_post,
            request_user,
        )

        return redirect(reverse('system_list'))

    # show empty form
    else:
        form = SystemModificatorForm(initial={
            'systemstatus': 2,
            'analysisstatus': 1,
        })

    # call logger
    debug_logger(str(request.user), " SYSTEM_MODIFICATOR_ENTERED")
    return render(request, 'dfirtrack_main/system/system_modificator.html',
                  {'form': form})
Example no. 6
 def get(self, request, torrent_id=None, *args, **kwargs):
     if torrent_id is None:
         async_task('subprocess.call', [secrets.transmission_sync_script])
     else:
         async_task('subprocess.call', [secrets.transmission_sync_script, torrent_id])
         
     return HttpResponse()
Example no. 7
def execute(request):
    workflow_id = request.POST['workflow_id']
    if workflow_id == '' or workflow_id is None:
        context = {'errMsg': 'workflow_id参数为空.'}
        return render(request, 'error.html', context)

    workflow_id = int(workflow_id)
    workflow_detail = SqlWorkflow.objects.get(id=workflow_id)
    instance_name = workflow_detail.instance_name
    db_name = workflow_detail.db_name
    url = get_detail_url(request, workflow_id)

    if can_execute(request.user, workflow_id) is False:
        context = {'errMsg': '你无权操作当前工单!'}
        return render(request, 'error.html', context)

    # Check whether the SQL is high-risk (critical DDL); if so, block execution
    if SysConfig().sys_config.get('critical_ddl_regex', '') != '':
        if InceptionDao().critical_ddl(workflow_detail.sql_content):
            context = {'errMsg': '高危语句,禁止执行!'}
            return render(request, 'error.html', context)

    # Set the workflow status to executing and update the reviewok_time field
    workflow_detail.status = Const.workflowStatus['executing']
    workflow_detail.reviewok_time = timezone.now()
    workflow_detail.save()
    async_task('sql.utils.execute_sql.execute',
               workflow_detail.id,
               hook='sql.utils.execute_sql.execute_callback')
    return HttpResponseRedirect(reverse('sql:detail', args=(workflow_id, )))
Example no. 8
def systemsorted(request):
    """ exports markdown report for all systems (helper function to call the real function) """

    request_user = request.user

    # call logger
    debug_logger(str(request_user),
                 " SYSTEM_EXPORTER_MARKDOWN_SYSTEMSORTED_BEGIN")

    # check variables
    stop_exporter_markdown = check_config(request)

    # leave if variables caused errors
    if stop_exporter_markdown:
        return

    # show immediate message for user (but only if no errors have occurred before)
    messages.success(request,
                     'System exporter markdown (sorted by system) started')

    # call async function
    async_task(
        "dfirtrack_main.exporter.markdown.systemsorted.systemsorted_async",
        request_user,
    )

    return
Example no. 9
def dump_data_json(request):
    async_task("logserver.tasks.dump_data_to_json",
               q_options={'task_name': 'dump_to_json'})
    messages.success(
        request,
        'Added Job to dump Messages to json. File will be stored in ./media')
    return redirect("appsettings-index")
Example no. 10
def update_payment(request):
    import json
    # if this is a POST request we need to process the form data
    if request.method == 'POST':
        # create a form instance and populate it with data from the request:
        form = OrderPaymentForm(request.POST)
        # check whether it's valid:
        if form.is_valid():
            # process the data in form.cleaned_data as required
            order_id = form.cleaned_data['id']
            payment_date = form.cleaned_data['payment_date']
            payment_ref = form.cleaned_data['payment_ref']
            # Create Order
            user = User.objects.get(username=request.user)
            order = Order.objects.get(name=order_id, user=user)

            # print(booking,ref,order_name,booking_obj,user)
            order.paid = True
            order.payment_date = payment_date
            order.payment_ref = payment_ref
            order.save()

            # Added on July 2,2021 -- To delete QRID redis
            qrid = order.qrid
            async_task('utility.payment.delete_payment_channel', qrid)

            # redirect to a new URL:
            return HttpResponseRedirect(
                reverse_lazy('order:detail', kwargs={'pk': order.id}))

    # if a GET (or any other method) we'll create a blank form
    else:
        form = CreateOrderForm()

    return render(request, 'order/order_form.html', {'form': form})
Example no. 11
 def delete(self, request, *args, **kwargs):
     self.object = self.get_object()
     # Added on July 5,2021 -- To delete QRID redis
     qrid = self.object.qrid
     async_task('utility.payment.delete_payment_channel', qrid)
     self.object.delete()
     return redirect(self.get_success_url())
Example no. 12
def test_bad_secret(broker, monkeypatch):
    broker.list_key = 'test_bad_secret:q'
    async_task('math.copysign', 1, -1, broker=broker)
    stop_event = Event()
    stop_event.set()
    start_event = Event()
    cluster_id = uuidlib.uuid4()
    s = Sentinel(stop_event,
                 start_event,
                 cluster_id=cluster_id,
                 broker=broker,
                 start=False)
    Stat(s).save()
    # change the SECRET
    monkeypatch.setattr(Conf, "SECRET_KEY", "OOPS")
    stat = Stat.get_all()
    assert len(stat) == 0
    assert Stat.get(pid=s.parent_pid, cluster_id=cluster_id) is None
    task_queue = Queue()
    pusher(task_queue, stop_event, broker=broker)
    result_queue = Queue()
    task_queue.put('STOP')
    worker(
        task_queue,
        result_queue,
        Value('f', -1),
    )
    assert result_queue.qsize() == 0
    broker.delete_queue()
Example no. 13
def request_delete_account(request, user_slug):
    if request.method != "POST":
        return redirect("edit_account", user_slug, permanent=False)

    user = get_object_or_404(User, slug=user_slug)
    if user.id != request.me.id and not request.me.is_god:
        raise Http404()

    confirmation_string = request.POST.get("confirm")
    if confirmation_string != settings.GDPR_DELETE_CONFIRMATION:
        raise BadRequest(
            title="Неправильная строка подтверждения",
            message=
            f"Вы должны в точности написать \"{settings.GDPR_DELETE_CONFIRMATION}\" "
            f"чтобы запустить процедуру удаления аккаунта")

    DataRequests.register_forget_request(user)

    code = Code.create_for_user(user=user,
                                recipient=user.email,
                                length=settings.GDPR_DELETE_CODE_LENGTH)
    async_task(send_delete_account_request_email, user=user, code=code)

    return render(request, "users/messages/delete_account_requested.html",
                  {"user": user})
Example no. 14
    def update(self, instance, validated_data):
        instance.created = validated_data.get('created', instance.created)
        instance.updated = validated_data.get('updated', instance.updated)
        instance.survey = validated_data.get('survey', instance.survey)

        answers = self.initial_data['answers']
        for answer in answers:
            if answer.get('id', None):
                ans = models.Answer.objects.get(id=answer['id'])
                ans.body = answer['body']
                ans.updated = timezone.now()
                ans.save()
            else:
                models.Answer.objects.create(
                    question=answer['question'],
                    response=instance,
                    body=answer["body"],
                    created=timezone.now(),
                    updated=timezone.now(),
                )

        instance.photo.all().delete()

        photos = validated_data.get('photo', [])
        content_type = ContentType.objects.get(model='response',
                                               app_label='lists')
        for photo in photos:
            models.Attachment.objects.create(object_id=instance.id,
                                             content_type=content_type,
                                             **photo)
        async_task(tasks.basic_report, instance)
        return instance
Example no. 15
def _consume(filepath):
    if os.path.isdir(filepath):
        return

    if not os.path.isfile(filepath):
        logger.debug(f"Not consuming file {filepath}: File has moved.")
        return

    if not is_file_ext_supported(os.path.splitext(filepath)[1]):
        logger.warning(
            f"Not consuming file {filepath}: Unknown file extension.")
        return

    tag_ids = None
    try:
        if settings.CONSUMER_SUBDIRS_AS_TAGS:
            tag_ids = _tags_from_path(filepath)
    except Exception as e:
        logger.exception("Error creating tags from path")

    try:
        async_task("documents.tasks.consume_file",
                   filepath,
                   override_tag_ids=tag_ids if tag_ids else None,
                   task_name=os.path.basename(filepath)[:100])
    except Exception as e:
        # Catch all so that the consumer won't crash.
        # This is also what the test case is listening for to check for
        # errors.
        logger.exception("Error while consuming document")
Example no. 16
def intro(request):
    if (request.me.is_profile_complete and request.me.is_profile_reviewed
            and not request.me.is_profile_rejected):
        return redirect("profile", request.me.slug)

    if request.method == "POST":
        form = UserIntroForm(request.POST, request.FILES, instance=request.me)
        if form.is_valid():
            user = form.save(commit=False)

            # send to moderation
            user.is_profile_complete = True
            user.is_profile_reviewed = False
            user.is_profile_rejected = False
            user.save()

            # create intro post
            intro_post = Post.upsert_user_intro(user,
                                                form.cleaned_data["intro"],
                                                is_visible=False)

            # notify moderators to review profile
            async_task(notify_profile_needs_review, user, intro_post)

            return redirect("on_review")
    else:
        existing_intro = Post.get_user_intro(request.me)
        form = UserIntroForm(
            instance=request.me,
            initial={"intro": existing_intro.text if existing_intro else ""},
        )

    return render(request, "users/intro.html", {"form": form})
Example no. 17
def task_creator(request):
    """ function to create many tasks for many systems at once (helper function to call the real function) """

    # form was valid to post
    if request.method == 'POST':
        request_post = request.POST
        request_user = request.user

        # call async function
        async_task(
            "dfirtrack_main.creator.task_creator.task_creator_async",
            request_post,
            request_user,
        )

        return redirect(reverse('task_list'))

    # show empty form
    else:
        form = TaskCreatorForm(initial={
            'taskpriority': 2,
            'taskstatus': 1,
        })

        # call logger
        debug_logger(str(request.user), " TASK_CREATOR_ENTERED")
    return render(request, 'dfirtrack_main/task/task_creator.html',
                  {'form': form})
Example no. 18
def handle_irm_snapshot_post_save(sender, instance, created, raw, using,
                                  update_fields, **kwargs):
    async_task(
        tasks.import_irm_snapshot,
        snapshot_id=instance.id,
        task_name="Import IRM Snappshot file of infrastructue projects",
    )
Example no. 19
def test_max_rss(broker, monkeypatch):
    # set up the Sentinel
    broker.list_key = 'test_max_rss_test:q'
    async_task('django_q.tests.tasks.multiply', 2, 2, broker=broker)
    start_event = Event()
    stop_event = Event()
    cluster_id = uuidlib.uuid4()
    # override settings
    monkeypatch.setattr(Conf, 'MAX_RSS', 40000)
    monkeypatch.setattr(Conf, 'WORKERS', 1)
    # set a timer to stop the Sentinel
    threading.Timer(3, stop_event.set).start()
    s = Sentinel(stop_event, start_event, cluster_id=cluster_id, broker=broker)
    assert start_event.is_set()
    assert s.status() == Conf.STOPPED
    assert s.reincarnations == 1
    async_task('django_q.tests.tasks.multiply', 2, 2, broker=broker)
    task_queue = Queue()
    result_queue = Queue()
    # push the task
    pusher(task_queue, stop_event, broker=broker)
    # worker should exit on recycle
    worker(task_queue, result_queue, Value('f', -1))
    # check if the work has been done
    assert result_queue.qsize() == 1
    # save_limit test
    monkeypatch.setattr(Conf, 'SAVE_LIMIT', 1)
    result_queue.put('STOP')
    # run monitor
    monitor(result_queue)
    assert Success.objects.count() == Conf.SAVE_LIMIT
    broker.delete_queue()
Example no. 20
File: views.py Project: AHRJ/mailer
    def get(self, request, *args, **kwargs):
        self.object = self.get_object()
        context = self.get_context_data(object=self.object)

        letter_title = self.object.title
        letter_body = render_to_string(
            f"letter/{self.model.letter_type}_detail.html", context=context)
        letter_send_date = self.object.send_date
        letter_addresbook_ids = [
            entry.id for entry in self.object.addressbooks.all()
        ]

        async_task(
            SPSender.add_campaigns,
            hook=CreateCampaignView.assign_campaigns,
            from_email="*****@*****.**",
            from_name="ИД Животноводство",
            subject=letter_title,
            body=letter_body,
            send_date=letter_send_date,
            addressbook_ids=letter_addresbook_ids,
            letter=self.object,
        )
        self.object.status = Letter.Status.PENDING
        self.object.save()

        return HttpResponseRedirect(
            reverse(f"admin:letter_{self.model.letter_type}_changelist"))
Example no. 21
def handle_irm_snapshot_post_delete(sender, instance, using, **kwargs):
    async_task(
        tasks.index_irm_projects,
        snapshot_id=instance.id,
        task_name=
        "Update infrastructure projects search index following snapshot deletion",
    )
Example no. 22
    def post(self, request, *args, **kwargs):

        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        document = serializer.validated_data['document']
        document_data = serializer.validated_data['document_data']
        correspondent_id = serializer.validated_data['correspondent_id']
        document_type_id = serializer.validated_data['document_type_id']
        tag_ids = serializer.validated_data['tag_ids']
        title = serializer.validated_data['title']

        t = int(mktime(datetime.now().timetuple()))

        os.makedirs(settings.SCRATCH_DIR, exist_ok=True)

        with tempfile.NamedTemporaryFile(prefix="paperless-upload-",
                                         dir=settings.SCRATCH_DIR,
                                         delete=False) as f:
            f.write(document_data)
            os.utime(f.name, times=(t, t))

            async_task("documents.tasks.consume_file",
                       f.name,
                       override_filename=document.name,
                       override_title=title,
                       override_correspondent_id=correspondent_id,
                       override_document_type_id=document_type_id,
                       override_tag_ids=tag_ids,
                       task_name=os.path.basename(document.name)[:100])
        return Response("OK")
Example no. 23
def intro(request):
    if request.me.moderation_status == User.MODERATION_STATUS_APPROVED:
        return redirect("profile", request.me.slug)

    if request.method == "POST":
        form = UserIntroForm(request.POST, request.FILES, instance=request.me)
        if form.is_valid():
            user = form.save(commit=False)

            # send to moderation
            user.moderation_status = User.MODERATION_STATUS_ON_REVIEW
            user.save()

            # create intro post
            intro_post = Post.upsert_user_intro(user,
                                                form.cleaned_data["intro"],
                                                is_visible=False)

            Geo.update_for_user(user)

            # notify moderators to review profile
            async_task(notify_profile_needs_review, user, intro_post)

            return redirect("on_review")
    else:
        existing_intro = Post.get_user_intro(request.me)
        form = UserIntroForm(
            instance=request.me,
            initial={"intro": existing_intro.text if existing_intro else ""},
        )

    return render(request, "users/intro.html", {"form": form})
Example no. 24
def block_notify(request):
    """block_notify.

    :param request:
    """
    async_task('registration.tasks.query_transactions')
    return JsonResponse({"Transactions": "Queried"})
Example no. 25
def execute(request):
    workflow_id = request.POST['workflow_id']
    if workflow_id == '' or workflow_id is None:
        context = {'errMsg': 'workflow_id参数为空.'}
        return render(request, 'error.html', context)

    workflow_id = int(workflow_id)
    workflow_detail = SqlWorkflow.objects.get(id=workflow_id)

    if can_execute(request.user, workflow_id) is False:
        context = {'errMsg': '你无权操作当前工单!'}
        return render(request, 'error.html', context)

    # Set the workflow status to executing and update the reviewok_time field
    workflow_detail.status = 'workflow_executing'
    workflow_detail.reviewok_time = timezone.now()
    workflow_detail.save()
    async_task('sql.utils.execute_sql.execute', workflow_detail.id, hook='sql.utils.execute_sql.execute_callback',
               timeout=-1)
    # Add a workflow audit log entry
    audit_id = Audit.detail_by_workflow_id(workflow_id=workflow_id,
                                           workflow_type=WorkflowDict.workflow_type['sqlreview']).audit_id
    Audit.add_log(audit_id=audit_id,
                  operation_type=5,
                  operation_type_desc='执行工单',
                  operation_info="人工操作执行",
                  operator=request.user.username,
                  operator_display=request.user.display
                  )
    return HttpResponseRedirect(reverse('sql:detail', args=(workflow_id,)))
Example no. 26
def tag_creator(request):
    """ function to create many tags for many systems at once (helper function to call the real function) """

    # form was valid to post
    if request.method == 'POST':

        # get objects from request object
        request_post = request.POST
        request_user = request.user

        # show immediate message for user
        messages.success(request, 'Tag creator started')

        # call async function
        async_task(
            "dfirtrack_main.creator.tag_creator.tag_creator_async",
            request_post,
            request_user,
        )

        # return directly to tag list
        return redirect(reverse('tag_list'))

    # show empty form
    else:
        form = TagCreatorForm()

        # call logger
        debug_logger(str(request.user), " TAG_CREATOR_ENTERED")

    return render(request, 'dfirtrack_main/tag/tag_creator.html',
                  {'form': form})
Example no. 27
def execute(request):
    """
    Execute SQL
    :param request:
    :return:
    """
    # Check multiple permissions
    if not (request.user.has_perm('sql.sql_execute')
            or request.user.has_perm('sql.sql_execute_for_resource_group')):
        raise PermissionDenied
    workflow_id = int(request.POST.get('workflow_id', 0))
    if workflow_id == 0:
        context = {'errMsg': 'workflow_id参数为空.'}
        return render(request, 'error.html', context)

    if can_execute(request.user, workflow_id) is False:
        context = {'errMsg': '你无权操作当前工单!'}
        return render(request, 'error.html', context)

    if on_correct_time_period(workflow_id) is False:
        context = {'errMsg': '不在可执行时间范围内,如果需要修改执行时间请重新提交工单!'}
        return render(request, 'error.html', context)
    # Apply the corresponding changes based on the execution mode
    mode = request.POST.get('mode')
    if mode == "auto":
        status = "workflow_executing"
        operation_type = 5
        operation_type_desc = '执行工单'
        operation_info = "自动操作执行"
        finish_time = None
    else:
        status = "workflow_finish"
        operation_type = 6
        operation_type_desc = '手工工单'
        operation_info = "确认手工执行结束"
        finish_time = datetime.datetime.now()
    # Update the workflow to the corresponding status
    SqlWorkflow(
        id=workflow_id, status=status,
        finish_time=finish_time).save(update_fields=['status', 'finish_time'])

    # Add a workflow audit log entry
    audit_id = Audit.detail_by_workflow_id(
        workflow_id=workflow_id,
        workflow_type=WorkflowDict.workflow_type['sqlreview']).audit_id
    Audit.add_log(audit_id=audit_id,
                  operation_type=operation_type,
                  operation_type_desc=operation_type_desc,
                  operation_info=operation_info,
                  operator=request.user.username,
                  operator_display=request.user.display)
    if mode == "auto":
        # Add the task to the execution queue
        async_task('sql.utils.execute_sql.execute',
                   workflow_id,
                   hook='sql.utils.execute_sql.execute_callback',
                   timeout=-1,
                   task_name=f'sqlreview-execute-{workflow_id}')

    return HttpResponseRedirect(reverse('sql:detail', args=(workflow_id, )))
Example no. 28
    def log(
        cls,
        action,
        model_instance,
        obj_pk=None,
        user=None,
        remote_ip=None,
        template_data=None,
        changes=None,
        request=None,
        created_at=None,
    ):
        """
        Usage:
        >>> from hyakumori_crm.crm.models import Forest
        >>> forest = Forest.objects.first()
        >>> ActivityService.log(ForestActions.created, forest)
        """

        if changes is None:
            changes = {}

        if request is not None:
            if remote_ip is None:
                remote_ip = get_remote_ip(request)
            if user is None and request.user is not None:
                user = request.user

        try:
            template_name = action[0] if isinstance(action, tuple) else action
            template = MessageTemplate.objects.get(name=template_name)
            content_type = ContentType.objects.get_for_model(model_instance)
            log = ActionLog.objects.create(
                content_type=content_type,
                object_pk=obj_pk if obj_pk is not None else model_instance.pk,
                template_name=template.name,
                template_data=template_data,
                changes=changes,
                user=user,
                remote_ip=remote_ip,
            )
            if created_at is not None:
                log.created_at = created_at
            async_task(
                slack_notify,
                message=template.template,
                user_fullname=user.full_name,
                dt=log.created_at,
                obj_title=model_instance.REPR_NAME,
                obj_name=model_instance.repr_name(),
                ack_failure=True,
            )
            return log.save(update_fields=["created_at"])
        except Exception as e:
            cls.logger.warning(
                f"Error while creating activity log for {action} {model_instance}",
                exc_info=e,
            )
            return ActionLog.objects.none()
Example no. 29
def retry_failed(FailAdmin, request, queryset):
    """Submit selected tasks back to the queue."""
    for task in queryset:
        async_task(task.func,
                   *task.args or (),
                   hook=task.hook,
                   **task.kwargs or {})
        task.delete()
Example no. 30
 def download():
     async_task(
         func=get_cykliste_sobe_layer.get_cs_features_layer_func,
         cache_key=get_cykliste_sobe_layer.cache_key,
         cache_time=get_cykliste_sobe_layer.long_cache_time,
         save_to_file=get_cykliste_sobe_layer.features_file_path,
         save=True,
         sync=True)
Example no. 31
    def alesco_import(self, request):
        """Displays a form prompting user to upload an Excel spreadsheet of
        employee data from Alesco. Temporary measure until database link is
        worked out.
        """
        context = dict(
            site.each_context(request),
            title='Alesco data import'
        )

        if request.method == 'POST':
            form = self.AlescoImportForm(request.POST, request.FILES)
            if form.is_valid():
                upload = request.FILES['spreadsheet']
                # Run the task asynchronously.
                async_task(alesco_data_import, upload)
                messages.info(request, 'Alesco data spreadsheet uploaded successfully; data is now being processed.')
                return redirect('admin:organisation_departmentuser_changelist')
        else:
            form = self.AlescoImportForm()
        context['form'] = form

        return TemplateResponse(request, 'organisation/alesco_import.html', context)
Example no. 32
def test_cached(broker):
    broker.purge_queue()
    broker.cache.clear()
    group = 'cache_test'
    # queue the tests
    task_id = async_task('math.copysign', 1, -1, cached=True, broker=broker)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.popysign', 1, -1, cached=True, broker=broker, group=group)
    iter_id = async_iter('math.floor', [i for i in range(10)], cached=True)
    # test wait on cache
    # test wait timeout
    assert result(task_id, wait=10, cached=True) is None
    assert fetch(task_id, wait=10, cached=True) is None
    assert result_group(group, wait=10, cached=True) is None
    assert result_group(group, count=2, wait=10, cached=True) is None
    assert fetch_group(group, wait=10, cached=True) is None
    assert fetch_group(group, count=2, wait=10, cached=True) is None
    # run a single inline cluster
    task_count = 17
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    for i in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    result_queue = Queue()
    worker(task_queue, result_queue, Value('f', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # assert results
    assert result(task_id, wait=500, cached=True) == -1
    assert fetch(task_id, wait=500, cached=True).result == -1
    # make sure it's not in the db backend
    assert fetch(task_id) is None
    # assert group
    assert count_group(group, cached=True) == 6
    assert count_group(group, cached=True, failures=True) == 1
    assert result_group(group, cached=True) == [-1, -1, -1, -1, -1]
    assert len(result_group(group, cached=True, failures=True)) == 6
    assert len(fetch_group(group, cached=True)) == 6
    assert len(fetch_group(group, cached=True, failures=False)) == 5
    delete_group(group, cached=True)
    assert count_group(group, cached=True) is None
    delete_cached(task_id)
    assert result(task_id, cached=True) is None
    assert fetch(task_id, cached=True) is None
    # iter cached
    assert result(iter_id) is None
    assert result(iter_id, cached=True) is not None
    broker.cache.clear()
Example no. 33
 def confirm_account(self, template='users/email/account_confirmation.html', extra_context=None, subject=None):
     """
     Sends out an account confirmation email, which contains a link to set the user's password.
     This method is also used for the password_reset mechanism.
     """
     async_task('users.schedule.send_confirmation_mail', self, template, extra_context, subject)
Example no. 34
def do_sync():
    async_task('django_q.tests.tasks.countdown', 1, sync=True, save=True)
Example no. 35
def retry_failed(FailAdmin, request, queryset):
    """Submit selected tasks back to the queue."""
    for task in queryset:
        async_task(task.func, *task.args or (), hook=task.hook, **task.kwargs or {})
        task.delete()
Example no. 36
 def ping_sweep(self, request, queryset):
     for obj in queryset:
         async_task('status.utils.run_scan', obj.id)
     self.message_user(request, 'A ping sweep has been scheduled for these scan ranges.')
Example no. 37
 def run_full_scan(self, request, queryset):
     async_task('status.utils.run_all')
     self.message_user(request, 'A full scan has been scheduled.')
Example no. 38
 def run_scan_plugins(self, request, queryset):
     for plugin in queryset:
         async_task('status.utils.run_plugin', plugin.id)
     self.message_user(request, 'The scan plugins have been scheduled to run.')
Example no. 39
 def send(self, *args, **kwargs):
     # Send asynchronously via Django-Q
     from django_q.tasks import async_task
     async_task(self._send, *args, group='django-slack', q_options=kwargs)