Example #1
    def post(self, request, *args, **kwargs):
        """ handle http POST requests
        """
        import time
        from webui.scheduler.log import get_redis_key
        from webui.cnmain.utils import get_redis

        obj = self.get_object()
        args = [obj]
        if self.last_executed_required:
            args.append(int(request.POST.get('last_executed', 0)))
        task_kwargs = {}
        if request.POST.get('force') == 'true':
            task_kwargs['force'] = True
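        # Queue the task; str() on the AsyncResult returned by Celery's
        # delay() yields the task id.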
        task_id = str(self.task.delay(*args, **task_kwargs))
        redis_key = get_redis_key(task_id)
        red = get_redis()

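        # Poll for up to ten seconds until the worker has written its log key
        # to Redis, then redirect to the result detail page.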
        for _ in range(10):
            if red.exists(redis_key):
                return HttpResponseRedirect("{url}?{class_}={pk}".format(
                    url=reverse('scheduler_result_detail_view',
                                args=[task_id]),
                    class_=self.model.__name__.lower(),
                    pk=obj.pk))
            time.sleep(1)

        messages.error(
            request,
            'Something went wrong, check Sentry (task id {}).'.format(task_id))
        return redirect(obj)
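The view body above relies on names imported elsewhere in its module. A minimal set of module-level imports it assumes, sketched for a pre-2.0 Django (the unicode() call in Example #2 suggests a Python 2 codebase):

# Module-level imports assumed by the post() method above (sketch).
from django.contrib import messages
from django.core.urlresolvers import reverse  # django.urls on Django >= 2.0
from django.http import HttpResponseRedirect
from django.shortcuts import redirect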
Example #2
@task  # Celery task decorator; process_source.request below requires it
def process_source(source, older_than=0):
    """Processes a source"""
    red = redis.Redis()
    task_id = process_source.request.id

    local_manager.cleanup()
    loggy = get_redis_logger(task_id)
    local.logger = loggy

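    # Record this run in the source's sorted set, scored by the current time
    # (redis-py 2.x argument order; redis-py >= 3.0 takes a mapping instead).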
    red.zadd(
        'source:{}'.format(source.pk),
        get_redis_key(task_id),
        timestamp_now()
    )

    # Run the init handler; if it raises, the source is not processed.
    loggy.info('Evaluating Init Handler')
    try:
        wf_output = _process_init_handler(source)
    except Exception:
        loggy.exception(
            'An error occurred while processing Init Handler for source [%s]',
            unicode(source)
        )
        raise
    else:
        loggy.info(
            'Init handler executed successfully. Output %s', wf_output
        )

    # Select datasets that have not been scheduled within the last
    # `older_than` seconds.
    the_date = timezone.now() - datetime.timedelta(seconds=older_than)
    dataset_ctype = ContentType.objects.get_for_model(Dataset)
    already_scheduled_datasets = Scheduler.objects.filter(
        content_type=dataset_ctype, created__gte=the_date).values('object_id')
    datasets = Dataset.objects.filter(source=source)\
                              .exclude(pk__in=already_scheduled_datasets)

    count = datasets.count()
    if count:
        loggy.info('Processing %d datasets', count)

        result = group(
            [process_dataset.s(ds, logger_name=task_id)
                for ds in datasets]
        ).apply()

        if result.successful():
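            # Hand the per-dataset results to the disposal task; .get() blocks
            # this task until disposal has finished.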
            dispose_sequence.delay(result.join(), source, task_id).get()
        else:
            loggy.info('An error occurred in one of the process_dataset tasks')
    else:
        loggy.info('No datasets to process')

    loggy.info(END)
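As with Example #1, the task assumes imports living elsewhere in its module. The standard ones are sketched below; get_redis_logger, get_redis_key, timestamp_now, _process_init_handler, Dataset, Scheduler, process_dataset, dispose_sequence, local, local_manager and END are project-internal helpers whose import paths are not shown in the source.

# Standard imports assumed by process_source() above (sketch; the code is
# Python 2 / Celery 3.x-era, judging by unicode() and the .delay/.s API).
import datetime
import redis
from celery import group
from celery.task import task  # pre-4.0 location of the task decorator
from django.contrib.contenttypes.models import ContentType
from django.utils import timezone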