Example #1
    def test_scheduling(self, settings, mocker, stub):
        in_ms = lambda ms: utcnow() + datetime.timedelta(milliseconds=ms)  # noqa
        settings.SCHEDULE = {
            'a': {
                'task': stub.path,
                'args': ["a"],
                'singleton': False,
                'schedule': FixedSchedule(
                    [in_ms(10), in_ms(11), in_ms(80), in_ms(1000)]),
            },
            'b': {
                'task': stub.path,
                'args': ["b"],
                'singleton': False,
                'schedule': FixedSchedule([in_ms(40), in_ms(1000)]),
            },
        }
        queue = Queue()
        queue = Queue()

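        # request shutdown shortly after startup so scheduler.run() returns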
        def stop_scheduler():
            time.sleep(0.1)
            scheduler.shutdown_requested.set()

        scheduler = Scheduler()
        with futures.ThreadPoolExecutor(max_workers=2) as executor:
            executor.submit(stop_scheduler)
            scheduler.run()
        assert [t.args for t in queue.get_tasks()] == [["a"], ["a"], ["b"], ["a"]]
Example #2
    def __init__(self, mocker, target):
        from redis_tasks.utils import utcnow
        self.seq = 0
        self.mocker = mocker
        self.target = target
        self.now = utcnow().replace(microsecond=0)
        self.step()
Example #3
def get_history():
    max_finished_tasks = 5000
    max_failed_tasks = 1000
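    # read only the newest entries from the tail of each registry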
    finished_tasks = finished_task_registry.get_tasks(-max_finished_tasks, -1)
    failed_tasks = failed_task_registry.get_tasks(-max_failed_tasks, -1)
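    # if either list hit its cap, trim the other so both cover the same window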
    if len(finished_tasks) == max_finished_tasks:
        failed_tasks = [
            t for t in failed_tasks if t.ended_at >= finished_tasks[0].ended_at
        ]
    if len(failed_tasks) == max_failed_tasks:
        finished_tasks = [
            t for t in finished_tasks if t.ended_at >= failed_tasks[0].ended_at
        ]

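    # tasks currently being executed are displayed as running until "now"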
    now = utcnow()
    running_tasks = []
    for wid, tid in worker_registry.get_running_tasks().items():
        task = Task.fetch(tid)
        task.ended_at = now
        task.running_on = Worker.fetch(wid).description
        running_tasks.append(task)

    tasks = failed_tasks + finished_tasks + running_tasks
    tasks.sort(key=lambda t: t.started_at)

    by_func = defaultdict(list)
    for t in tasks:
        by_func[t.func_name].append(t)

    # reconstruct worker-mapping
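    # greedily assign each task to the first free "lane" (subgroup) whose
    # previous task has already ended by the time this one starts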
    for group in by_func.values():
        workers = []
        for task in sorted(group, key=lambda t: t.started_at):
            workers = [
                None if not t or t.ended_at <= task.started_at else t
                for t in workers
            ]
            try:
                task.worker = workers.index(None)
                workers[task.worker] = task
            except ValueError:
                task.worker = len(workers)
                workers.append(task)

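    # order groups by the earliest time of day any task started, then by the
    # longest task duration within the group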
    groups = sorted(by_func.values(),
                    key=lambda group_tasks: (
                        min(t.started_at.timetuple()[3:] for t in group_tasks),
                        max(t.ended_at - t.started_at for t in group_tasks),
                    ))

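    # functions accounting for less than 2% of all tasks are collapsed: their
    # short runs are rendered as points rather than ranges below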
    collapsed_groups = {k for k, v in by_func.items()
                        if len(v) / len(tasks) < 0.02}

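    # build one timeline row per task in the configured display timezone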
    tz = pytz.timezone(settings.TIMEZONE)
    rows = []
    for t in tasks:
        t.started_at = t.started_at.astimezone(tz)
        t.ended_at = t.ended_at.astimezone(tz)
        keys = {
            'group': t.func_name,
            'subgroup': t.worker,
            'start': t.started_at,
            'title': task_tooltip(t),
        }
        if hasattr(t, 'running_on'):
            keys.update({
                'end': t.ended_at,
                'type': 'range',
                'content': t.running_on,
            })
        elif (t.func_name not in collapsed_groups
              or (t.ended_at - t.started_at) > datetime.timedelta(minutes=1)):
            keys.update({
                'end': t.ended_at,
                'type': 'range',
                'content': f'[{t.ended_at - t.started_at}]',
            })
        else:
            keys.update({
                'type': 'point',
                'content': t.started_at.strftime('%H:%M:%S'),
            })

        if t.status == 'failed':
            keys['style'] = 'border-color: {0}; background-color: {0}'.format(
                '#E69089')
        elif t.status == 'running':
            keys['style'] = 'border-color: {0}; background-color: {0}'.format(
                '#D5F6D7')

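        # convert any datetime values to POSIX timestamps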
        keys = {
            k: v.timestamp() if isinstance(v, datetime.datetime) else v
            for k, v in keys.items()
        }
        rows.append(keys)

    return {
        "rows": rows,
        "groups": [
            {'id': group[0].func_name, 'content': group[0].func_name, 'order': i}
            for i, group in enumerate(groups)
        ],
    }
Example #4
def test_utc():
    now = utils.utcnow()
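    # utcformat/utcparse round-trip at second precision, hence the
    # microsecond truncation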
    assert utils.utcparse(utils.utcformat(now)) == now.replace(microsecond=0)
Example #5
def test_serialization():
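    # values, including datetimes, must survive a serialize/deserialize round trip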
    my_obj = {'a': 'b', 'c': 54.325, 'd': utils.utcnow()}
    assert utils.deserialize(utils.serialize(my_obj)) == my_obj

    with pytest.raises(exceptions.DeserializationError):
        utils.deserialize(b"invalid")