def test_cached(broker):
    """End-to-end check of the cached (cache backend) result path.

    Queues one standalone task, a group of five successes plus one failure,
    and an async_iter batch; runs pusher/worker/monitor inline, then verifies
    the result/fetch/group/delete APIs against the cache instead of the ORM.
    """
    broker.purge_queue()
    broker.cache.clear()
    group = 'cache_test'
    # queue the tests
    # BUGFIX: async_task returns the task id *string* directly; the previous
    # `task = async_task(...); task_id = task['id']` indexed that string and
    # raised TypeError.
    task_id = async_task('math.copysign', 1, -1, cached=True, broker=broker)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    # deliberately misspelled dotted path: yields the single group failure
    async_task('math.popysign', 1, -1, cached=True, broker=broker, group=group)
    iter_id = async_iter('math.floor', [i for i in range(10)], cached=True)
    # test wait on cache
    # test wait timeout: nothing has run yet, so everything is still None
    assert result(task_id, wait=10, cached=True) is None
    assert fetch(task_id, wait=10, cached=True) is None
    assert result_group(group, wait=10, cached=True) is None
    assert result_group(group, count=2, wait=10, cached=True) is None
    assert fetch_group(group, wait=10, cached=True) is None
    assert fetch_group(group, count=2, wait=10, cached=True) is None
    # run a single inline cluster
    # 1 standalone + 6 group tasks + 10 async_iter chunks
    task_count = 17
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    for i in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    result_queue = Queue()
    worker(task_queue, result_queue, Value('f', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # assert results
    assert result(task_id, wait=500, cached=True) == -1
    assert fetch(task_id, wait=500, cached=True).result == -1
    # make sure it's not in the db backend
    assert fetch(task_id) is None
    # assert group
    assert count_group(group, cached=True) == 6
    assert count_group(group, cached=True, failures=True) == 1
    # without failures=True only the 5 successes are returned
    assert result_group(group, cached=True) == [-1, -1, -1, -1, -1]
    assert len(result_group(group, cached=True, failures=True)) == 6
    assert len(fetch_group(group, cached=True)) == 6
    assert len(fetch_group(group, cached=True, failures=False)) == 5
    delete_group(group, cached=True)
    assert count_group(group, cached=True) is None
    delete_cached(task_id)
    assert result(task_id, cached=True) is None
    assert fetch(task_id, cached=True) is None
    # iter cached: the collated result lives only in the cache, not the db
    assert result(iter_id) is None
    assert result(iter_id, cached=True) is not None
    broker.cache.clear()
def test_cached(broker):
    """End-to-end check of the cached (cache backend) result path.

    Queues one standalone task, a group of five successes plus one failure,
    and an async_iter batch; runs pusher/worker/monitor inline, then verifies
    the result/fetch/group/delete APIs against the cache instead of the ORM.
    """
    broker.purge_queue()
    broker.cache.clear()
    group = 'cache_test'
    # queue the tests
    task_id = async_task('math.copysign', 1, -1, cached=True, broker=broker)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    # misspelled dotted path ('popysign'): yields the single group failure
    async_task('math.popysign', 1, -1, cached=True, broker=broker, group=group)
    iter_id = async_iter('math.floor', [i for i in range(10)], cached=True)
    # test wait on cache
    # test wait timeout: nothing has run yet, so everything is still None
    assert result(task_id, wait=10, cached=True) is None
    assert fetch(task_id, wait=10, cached=True) is None
    assert result_group(group, wait=10, cached=True) is None
    assert result_group(group, count=2, wait=10, cached=True) is None
    assert fetch_group(group, wait=10, cached=True) is None
    assert fetch_group(group, count=2, wait=10, cached=True) is None
    # run a single inline cluster
    # 1 standalone + 6 group tasks + 10 async_iter chunks
    task_count = 17
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    for i in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    result_queue = Queue()
    worker(task_queue, result_queue, Value('f', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # assert results
    assert result(task_id, wait=500, cached=True) == -1
    assert fetch(task_id, wait=500, cached=True).result == -1
    # make sure it's not in the db backend
    assert fetch(task_id) is None
    # assert group
    assert count_group(group, cached=True) == 6
    assert count_group(group, cached=True, failures=True) == 1
    # without failures=True only the 5 successes are returned
    assert result_group(group, cached=True) == [-1, -1, -1, -1, -1]
    assert len(result_group(group, cached=True, failures=True)) == 6
    assert len(fetch_group(group, cached=True)) == 6
    assert len(fetch_group(group, cached=True, failures=False)) == 5
    delete_group(group, cached=True)
    assert count_group(group, cached=True) is None
    delete_cached(task_id)
    assert result(task_id, cached=True) is None
    assert fetch(task_id, cached=True) is None
    # iter cached: the collated result lives only in the cache, not the db
    assert result(iter_id) is None
    assert result(iter_id, cached=True) is not None
    broker.cache.clear()
def test_azure_sync_resource_group_delete(
        session_get_func, get_subscription_and_session_func,
        mock_response_class, json_file, subscription, broker,
        mce_app_resource_type_azure_group):
    """Delete 2 resource group and create change events.

    A first sync against the mocked API payload creates the groups; a second
    sync against an empty payload marks them deleted and records one DELETE
    change event per group.
    """
    data = json_file("resource_group_list.json")
    count_groups = len(data['value'])
    get_subscription_and_session_func.return_value = (subscription, requests.Session())
    # Create
    session_get_func.return_value = mock_response_class(200, data)
    task_id = async_task(
        'mce_tasks_djq.azure.sync_resource_group',
        subscription.pk,
        broker=broker,
        sync=True)
    task = fetch(task_id)
    assert task.success is True, result(task_id)
    assert result(task_id) == dict(
        errors=0,
        created=2,
        updated=0,
        deleted=0
    )
    assert ResourceGroupAzure.all_objects.count() == 2
    # Delete
    # an empty resource list means every existing group is gone upstream
    session_get_func.return_value = mock_response_class(200, {"value": []})
    task_id = async_task(
        'mce_tasks_djq.azure.sync_resource_group',
        subscription.pk,
        task_name='test.azure.sync.resource_group',
        broker=broker,
        sync=True)
    task = fetch(task_id)
    assert task.success is True, result(task_id)
    assert result(task_id) == dict(
        errors=0,
        created=0,
        updated=0,
        deleted=count_groups
    )
    assert ResourceEventChange.objects.filter(
        action=constants.EventChangeType.DELETE).count() == count_groups
    # default manager no longer sees the groups, all_objects still does
    assert ResourceGroupAzure.objects.count() == 0
    assert ResourceGroupAzure.all_objects.count() == count_groups
def test_azure_sync_resource_group_update(
        session_get_func, get_subscription_and_session_func,
        mock_response_class, json_file, subscription, broker,
        mce_app_resource_type_azure_group):
    """Update 1 resource group and create change event.

    A first sync creates the groups; changing one tag in the payload and
    re-syncing must report exactly one update and one UPDATE change event.
    """
    assert ResourceGroupAzure.all_objects.count() == 0
    assert ResourceEventChange.all_objects.count() == 0
    data = json_file("resource_group_list.json")
    get_subscription_and_session_func.return_value = (subscription, requests.Session())
    # Create
    session_get_func.return_value = mock_response_class(200, data)
    task_id = async_task(
        'mce_tasks_djq.azure.sync_resource_group',
        subscription.pk,
        broker=broker,
        sync=True)
    task = fetch(task_id)
    assert task.success is True, result(task_id)
    assert result(task_id) == dict(
        errors=0,
        created=2,
        updated=0,
        deleted=0
    )
    assert ResourceGroupAzure.all_objects.count() == 2
    # Update one
    # mutate a single tag so only the first group differs from the db state
    data['value'][0]['tags']["testtag"] = "TEST"
    session_get_func.return_value = mock_response_class(200, data)
    task_id = async_task(
        'mce_tasks_djq.azure.sync_resource_group',
        subscription.pk,
        task_name='test.azure.sync.resource_group',
        broker=broker,
        sync=True)
    task = fetch(task_id)
    assert task.success is True, result(task_id)
    assert result(task_id) == dict(
        errors=0,
        created=0,
        updated=1,
        deleted=0
    )
    assert ResourceEventChange.objects.filter(
        action=constants.EventChangeType.UPDATE).count() == 1
def test_azure_sync_resource_type(broker):
    """First sync creates one ResourceType per provider; a re-run updates them all."""
    expected = len(PROVIDERS)
    # initial run: every provider type is new
    task_id = async_task('mce_tasks_djq.azure.sync_resource_type',
                         broker=broker, sync=True)
    task = fetch(task_id)
    assert task.success is True, result(task_id)
    assert result(task_id) == {'errors': 0, 'created': expected,
                               'updated': 0, 'deleted': 0}
    assert ResourceType.objects.count() == expected
    # second run: nothing new, every type is touched as an update
    task = async_task('mce_tasks_djq.azure.sync_resource_type',
                      broker=broker, sync=True)
    assert result(task) == {'errors': 0, 'created': 0,
                            'updated': expected, 'deleted': 0}
def test_iter(broker):
    """Exercise async_iter over lists/tuples and the incremental Iter class."""
    broker.purge_queue()
    broker.cache.clear()
    floor_args = [i for i in range(10)]
    copysign_args = [(1, -1), (2, -1), (3, -4), (5, 6)]
    tuple_args = (1, 2, 3, 4, 5)
    floor_id = async_iter('math.floor', floor_args, sync=True)
    copysign_id = async_iter('math.copysign', copysign_args, sync=True)
    tuple_id = async_iter('math.floor', tuple_args, sync=True)
    single_id = async_iter('math.floor', (1, ), sync=True)
    floor_result = result(floor_id)
    assert floor_result is not None
    # fetch() must hand back the same collated result object value
    assert fetch(floor_id).result == floor_result
    assert result(copysign_id) is not None
    assert result(tuple_id) is not None
    assert result(single_id)[0] == 1
    # test iter class: arguments are collected and only run on demand
    batch = Iter('math.copysign', sync=True, cached=True)
    batch.append(1, -1)
    batch.append(2, -1)
    batch.append(3, -4)
    batch.append(5, 6)
    assert batch.started is False
    assert batch.length() == 4
    assert batch.run() is not None
    assert len(batch.result()) == 4
    assert len(batch.fetch().result) == 4
    # appending after a run invalidates the result until the next run
    batch.append(1, -7)
    assert batch.result() is None
    batch.run()
    assert len(batch.result()) == 5
def test_iter(broker):
    """Exercise async_iter over lists/tuples and the incremental Iter class."""
    broker.purge_queue()
    broker.cache.clear()
    it = [i for i in range(10)]
    it2 = [(1, -1), (2, -1), (3, -4), (5, 6)]
    it3 = (1, 2, 3, 4, 5)
    t = async_iter('math.floor', it, sync=True)
    t2 = async_iter('math.copysign', it2, sync=True)
    t3 = async_iter('math.floor', it3, sync=True)
    t4 = async_iter('math.floor', (1,), sync=True)
    result_t = result(t)
    assert result_t is not None
    # fetch() must hand back the same collated result value
    task_t = fetch(t)
    assert task_t.result == result_t
    assert result(t2) is not None
    assert result(t3) is not None
    assert result(t4)[0] == 1
    # test iter class: arguments are collected and only run on demand
    i = Iter('math.copysign', sync=True, cached=True)
    i.append(1, -1)
    i.append(2, -1)
    i.append(3, -4)
    i.append(5, 6)
    assert i.started is False
    assert i.length() == 4
    assert i.run() is not None
    assert len(i.result()) == 4
    assert len(i.fetch().result) == 4
    # appending after a run invalidates the result until the next run
    i.append(1, -7)
    assert i.result() is None
    i.run()
    assert len(i.result()) == 5
def test_scheduler(r):
    """Run one HOURLY schedule through an inline pusher/worker/monitor cycle,
    then create (without executing) a schedule of every supported type."""
    list_key = 'scheduler_test:q'
    r.delete(list_key)
    schedule = create_schedule('math.copysign', 1, -1,
                               hook='django_q.tests.tasks.result',
                               schedule_type=Schedule.HOURLY,
                               repeats=1)
    assert schedule.last_run() is None
    # run scheduler
    scheduler(list_key=list_key)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert task_queue.qsize() == 1
    assert r.llen(list_key) == 0
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == 1
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # reload: the run must have decremented repeats and recorded success
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    # copysign(1, -1) is negative
    assert task.result < 0
    for t in Schedule.TYPE:
        schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                   2,
                                   word='django',
                                   schedule_type=t[0],
                                   repeats=1,
                                   hook='django_q.tests.tasks.result'
                                   )
        assert schedule is not None
        assert schedule.last_run() is None
        scheduler()
def test_scheduler(r):
    """Run one HOURLY schedule through an inline pusher/worker/monitor cycle,
    then create (without executing) a schedule of every supported type."""
    list_key = 'scheduler_test:q'
    r.delete(list_key)
    schedule = create_schedule('math.copysign', 1, -1,
                               hook='django_q.tests.tasks.result',
                               schedule_type=Schedule.HOURLY,
                               repeats=1)
    assert schedule.last_run() is None
    # run scheduler
    scheduler(list_key=list_key)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert task_queue.qsize() == 1
    assert r.llen(list_key) == 0
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == 1
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # reload: the run must have decremented repeats and recorded success
    schedule.refresh_from_db()
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    # copysign(1, -1) is negative
    assert task.result < 0
    for t in Schedule.TYPE:
        schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                   2,
                                   word='django',
                                   schedule_type=t[0],
                                   repeats=1,
                                   hook='django_q.tests.tasks.result')
        assert schedule is not None
        assert schedule.last_run() is None
def test_azure_sync_resource_create(get_subscription_and_session,
                                    get_resource_by_id,
                                    get_resources_list,
                                    mock_response_class,
                                    json_file,
                                    subscription,
                                    resource_group,
                                    broker,
                                    require_resource_types):
    """Sync creates one ResourceAzure per mocked resource plus CREATE events.

    The Azure session/list/by-id helpers are mocked with canned JSON payloads
    so the sync task runs fully offline.
    """
    # NOTE: removed a dead triple-quoted block of commented-out group-lookup
    # code and a stale '#resource_group = ...' comment that were no-ops here.
    data_resource_list = json_file("resource-list.json")
    data_resource = json_file("resource-vm.json")
    count = len(data_resource_list['value'])
    get_subscription_and_session.return_value = (subscription, requests.Session())
    get_resources_list.return_value = data_resource_list['value']
    get_resource_by_id.return_value = data_resource
    task_id = async_task('mce_tasks_djq.azure.sync_resource',
                         subscription.pk,
                         task_name='test.azure.sync.resource',
                         broker=broker,
                         sync=True)
    task = fetch(task_id)
    assert task.success is True, result(task_id)
    assert result(task_id) == dict(errors=0, created=count, updated=0, deleted=0)
    assert ResourceAzure.objects.count() == count
    # +1: the ResourceGroup fixture itself also produced a CREATE event
    assert ResourceEventChange.objects.filter(
        action=constants.EventChangeType.CREATE).count() == count + 1  # ResourceGroup
def test_05(request):
    """Run a fixed SELECT through the async query engine and return the
    outcome as a JSON HttpResponse.

    Response shape: ``{"status": 0|1, "msg": "ok"|<error>, "data": {...}}``.
    Removed commented-out debug ``return HttpResponse(...)`` lines and a dead
    alternative SQL string.
    """
    results = {'status': 0, 'msg': 'ok', 'data': {}}
    # NOTE(review): hard-coded instance id=1 and db 'sbtest' — this is a
    # throwaway test view, not production code.
    instance = Db_instance.objects.get(id=1)
    query_engine = get_engine(instance=instance)
    sql_content = 'select * from sbtest.sbtest1 where id=3;'
    task_id = async_task(query_engine.query_set, sql=sql_content, db_name='sbtest')
    # block up to 3 seconds waiting for the task result
    query_task = fetch(task_id, wait=3 * 1000)
    if query_task:
        if query_task.success:
            query_result = query_task.result
            query_result.rows = query_result.to_dict()
            query_result.query_time = query_task.time_taken()
        else:
            # task ran but failed: its result holds the error message
            query_result = ResultsSet(full_sql=sql_content)
            query_result.error = query_task.result
    else:
        # wait timed out; async_task closed the connection itself
        query_result = ResultsSet(full_sql=sql_content)
        query_result.error = '3'
    # query failed
    if query_result.error:
        results['status'] = 1
        results['msg'] = query_result.error
    else:
        results['data'] = query_result.__dict__
    return HttpResponse(json.dumps(results))
def recognize_multiple(movie_ids, movies=None, wait_for_finish=True):
    """Run recognize_movie for a batch, either by id or by instance.

    :param movie_ids: iterable of movie ids; takes precedence when truthy.
    :param movies: iterable of movie instances, used when movie_ids is falsy.
    :param wait_for_finish: async mode only — when False, fire and forget
        (returns None); when True, wait and return serialized results.
    :return: sync mode: list of recognize_movie return values;
             async + wait: list of serialized results; async + no wait: None.
    """
    tasks_ = []
    # Build the (movie, movie_id) argument pairs once; the two call styles
    # previously duplicated the whole enqueue loop.
    if movie_ids:
        arg_pairs = [(None, movie_id) for movie_id in movie_ids]
    elif movies is not None:
        arg_pairs = [(movie, None) for movie in movies]
    else:
        arg_pairs = []
    if settings.USE_ASYNC:
        # track overall progress for the UI
        current_task, _ = CurrentTask.objects.get_or_create(name='Recognizing')
        current_task.progress_max += len(arg_pairs)
        current_task.save()
    for movie, movie_id in arg_pairs:
        if settings.USE_ASYNC:
            # NOTE(review): hooks are serialized with the task and a lambda is
            # not picklable by stdlib pickle — confirm the configured broker
            # serializer supports this, otherwise use a module-level hook.
            task_id = async_task(recognize_movie, movie, movie_id,
                                 hook=lambda f: hook_set_task_ended(f, name='Recognizing'))
            tasks_.append(task_id)
        else:
            # synchronous mode: call inline, collect the actual results
            tasks_.append(recognize_movie(movie, movie_id))
    if not settings.USE_ASYNC:
        return tasks_
    if not wait_for_finish:
        # fire-and-forget: caller gets nothing back
        return
    recognized = []
    for task_id in tasks_:
        res = fetch(task_id, wait=2000)
        if res is not None and res.result is not None:
            recognized.append(res.result.serialize())
    return recognized
def test_enqueue(broker, admin_user):
    """Queue a representative mix of tasks (good, bad-args, missing function,
    no-result, callable, model arg, broken hook, group, unsaved), run them
    through an inline pusher/worker/monitor cycle, and verify every result,
    group API and the save opt-out."""
    broker.list_key = "cluster_test:q"
    broker.delete_queue()
    a = async_task(
        "django_q.tests.tasks.count_letters",
        DEFAULT_WORDLIST,
        hook="django_q.tests.test_cluster.assert_result",
        broker=broker,
    )
    b = async_task(
        "django_q.tests.tasks.count_letters2",
        WordClass(),
        hook="django_q.tests.test_cluster.assert_result",
        broker=broker,
    )
    # unknown argument
    c = async_task(
        "django_q.tests.tasks.count_letters",
        DEFAULT_WORDLIST,
        "oneargumentoomany",
        hook="django_q.tests.test_cluster.assert_bad_result",
        broker=broker,
    )
    # unknown function
    d = async_task(
        "django_q.tests.tasks.does_not_exist",
        WordClass(),
        hook="django_q.tests.test_cluster.assert_bad_result",
        broker=broker,
    )
    # function without result
    e = async_task("django_q.tests.tasks.countdown", 100000, broker=broker)
    # function as instance
    f = async_task(multiply, 753, 2, hook=assert_result, broker=broker)
    # model as argument
    g = async_task("django_q.tests.tasks.get_task_name", Task(name="John"), broker=broker)
    # args,kwargs, group and broken hook
    h = async_task(
        "django_q.tests.tasks.word_multiply",
        2,
        word="django",
        hook="fail.me",
        broker=broker,
    )
    # args unpickle test
    j = async_task("django_q.tests.tasks.get_user_id", admin_user, broker=broker, group="test_j")
    # q_options and save opt_out test
    k = async_task(
        "django_q.tests.tasks.get_user_id",
        admin_user,
        q_options={
            "broker": broker,
            "group": "test_k",
            "save": False,
            "timeout": 90
        },
    )
    # test unicode
    assert Task(name="Amalia").__str__() == "Amalia"
    # check if everything has a task id
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    assert isinstance(j, str)
    assert isinstance(k, str)
    # run the cluster to execute the tasks
    task_count = 10
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for _ in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put("STOP")
    # test wait timeout: nothing has been executed yet
    assert result(j, wait=10) is None
    assert fetch(j, wait=10) is None
    assert result_group("test_j", wait=10) is None
    assert result_group("test_j", count=2, wait=10) is None
    assert fetch_group("test_j", wait=10) is None
    assert fetch_group("test_j", count=2, wait=10) is None
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value("f", -1))
    assert result_queue.qsize() == task_count
    result_queue.put("STOP")
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c: too many arguments must fail
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d: unresolvable function must fail
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e: succeeds but returns nothing
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == "John"
    # task h: a broken hook must not break the task itself
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    # task j
    result_j = fetch(j)
    assert result_j is not None
    assert result_j.success is True
    assert result_j.result == result_j.args[0].id
    # check fetch, result by name
    assert fetch(result_j.name) == result_j
    assert result(result_j.name) == result_j.result
    # groups
    assert result_group("test_j")[0] == result_j.result
    assert result_j.group_result()[0] == result_j.result
    assert result_group("test_j", failures=True)[0] == result_j.result
    assert result_j.group_result(failures=True)[0] == result_j.result
    assert fetch_group("test_j")[0].id == [result_j][0].id
    assert fetch_group("test_j", failures=False)[0].id == [result_j][0].id
    assert count_group("test_j") == 1
    assert result_j.group_count() == 1
    assert count_group("test_j", failures=True) == 0
    assert result_j.group_count(failures=True) == 0
    assert delete_group("test_j") == 1
    assert result_j.group_delete() == 0
    deleted_group = delete_group("test_j", tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    deleted_group = result_j.group_delete(tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    # task k should not have been saved
    assert fetch(k) is None
    assert fetch(k, 100) is None
    assert result(k, 100) is None
    broker.delete_queue()
def fetch_task(task_id, wait=0, cached=Conf.CACHED):
    """Fetch a single task inside the current tenant's schema.

    Thin wrapper around django_q's ``fetch`` that activates the schema of
    the active database connection before looking the task up.
    """
    with schema_context(connection.schema_name):
        return fetch(task_id, wait=wait, cached=cached)
def test_scheduler(broker, monkeypatch):
    """Full scheduler coverage: duplicate-name constraint, an executed HOURLY
    schedule, every schedule type (ONCE deletion, negative repeats, MINUTES,
    CRON validation), CATCH_UP on/off, and cluster-targeted schedules."""
    broker.list_key = "scheduler_test:q"
    broker.delete_queue()
    schedule = create_schedule(
        "math.copysign",
        1,
        -1,
        name="test math",
        hook="django_q.tests.tasks.result",
        schedule_type=Schedule.HOURLY,
        repeats=1,
    )
    assert schedule.last_run() is None
    # check duplicate constraint
    with pytest.raises(IntegrityError):
        schedule = create_schedule(
            "math.copysign",
            1,
            -1,
            name="test math",
            hook="django_q.tests.tasks.result",
            schedule_type=Schedule.HOURLY,
            repeats=1,
        )
    # run scheduler
    scheduler(broker=broker)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, broker=broker)
    assert task_queue.qsize() == 1
    assert broker.queue_size() == 0
    task_queue.put("STOP")
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value("b", -1))
    assert result_queue.qsize() == 1
    result_queue.put("STOP")
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # reload: the run must have decremented repeats and recorded success
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    assert schedule.next_run < arrow.get(timezone.now()).shift(hours=+1)
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    # copysign(1, -1) is negative
    assert task.result < 0
    # Once schedule with delete
    once_schedule = create_schedule(
        "django_q.tests.tasks.word_multiply",
        2,
        word="django",
        schedule_type=Schedule.ONCE,
        repeats=-1,
        hook="django_q.tests.tasks.result",
    )
    assert hasattr(once_schedule, "pk") is True
    # negative repeats
    always_schedule = create_schedule(
        "django_q.tests.tasks.word_multiply",
        2,
        word="django",
        schedule_type=Schedule.DAILY,
        repeats=-1,
        hook="django_q.tests.tasks.result",
    )
    assert hasattr(always_schedule, "pk") is True
    # Minute schedule
    minute_schedule = create_schedule(
        "django_q.tests.tasks.word_multiply",
        2,
        word="django",
        schedule_type=Schedule.MINUTES,
        minutes=10,
    )
    assert hasattr(minute_schedule, "pk") is True
    # Cron schedule
    cron_schedule = create_schedule(
        "django_q.tests.tasks.word_multiply",
        2,
        word="django",
        schedule_type=Schedule.CRON,
        cron="0 22 * * 1-5",
    )
    assert hasattr(cron_schedule, "pk") is True
    assert cron_schedule.full_clean() is None
    assert cron_schedule.__str__() == "django_q.tests.tasks.word_multiply"
    # invalid cron expression (day-of-week 1-12) must fail validation
    with pytest.raises(ValidationError):
        create_schedule(
            "django_q.tests.tasks.word_multiply",
            2,
            word="django",
            schedule_type=Schedule.CRON,
            cron="0 22 * * 1-12",
        )
    # All other types
    for t in Schedule.TYPE:
        if t[0] == Schedule.CRON:
            continue
        schedule = create_schedule(
            "django_q.tests.tasks.word_multiply",
            2,
            word="django",
            schedule_type=t[0],
            repeats=1,
            hook="django_q.tests.tasks.result",
        )
        assert schedule is not None
        assert schedule.last_run() is None
        scheduler(broker=broker)
    # via model
    Schedule.objects.create(
        func="django_q.tests.tasks.word_multiply",
        args="2",
        kwargs='word="django"',
        schedule_type=Schedule.DAILY,
    )
    # scheduler
    scheduler(broker=broker)
    # ONCE schedule should be deleted
    assert Schedule.objects.filter(pk=once_schedule.pk).exists() is False
    # Catch up On
    monkeypatch.setattr(Conf, "CATCH_UP", True)
    now = timezone.now()
    schedule = create_schedule(
        "django_q.tests.tasks.word_multiply",
        2,
        word="catch_up",
        schedule_type=Schedule.HOURLY,
        next_run=timezone.now() - timedelta(hours=12),
        repeats=-1,
    )
    scheduler(broker=broker)
    schedule = Schedule.objects.get(pk=schedule.pk)
    # with catch up, next_run stays in the past until it has caught up
    assert schedule.next_run < now
    # Catch up off
    monkeypatch.setattr(Conf, "CATCH_UP", False)
    scheduler(broker=broker)
    schedule = Schedule.objects.get(pk=schedule.pk)
    # without catch up, next_run jumps straight past now
    assert schedule.next_run > now
    # Done
    broker.delete_queue()
    monkeypatch.setattr(Conf, "PREFIX", "some_cluster_name")
    # create a schedule on another cluster
    schedule = create_schedule(
        "math.copysign",
        1,
        -1,
        name="test schedule on a another cluster",
        hook="django_q.tests.tasks.result",
        schedule_type=Schedule.HOURLY,
        cluster="some_other_cluster_name",
        repeats=1,
    )
    # run scheduler
    scheduler(broker=broker)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, broker=broker)
    # queue must be empty: the schedule targets a different cluster
    assert task_queue.qsize() == 0
    monkeypatch.setattr(Conf, "PREFIX", "default")
    # create a schedule on the same cluster
    schedule = create_schedule(
        "math.copysign",
        1,
        -1,
        name="test schedule with no cluster",
        hook="django_q.tests.tasks.result",
        schedule_type=Schedule.HOURLY,
        cluster="default",
        repeats=1,
    )
    # run scheduler
    scheduler(broker=broker)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, broker=broker)
    # queue must contain a task
    assert task_queue.qsize() == 1
def test_scheduler(broker, monkeypatch):
    """Scheduler coverage: duplicate-name constraint, an executed HOURLY
    schedule, every schedule type (ONCE deletion, negative repeats, MINUTES,
    CRON validation), and CATCH_UP on/off."""
    broker.list_key = 'scheduler_test:q'
    broker.delete_queue()
    schedule = create_schedule('math.copysign', 1, -1,
                               name='test math',
                               hook='django_q.tests.tasks.result',
                               schedule_type=Schedule.HOURLY,
                               repeats=1)
    assert schedule.last_run() is None
    # check duplicate constraint
    with pytest.raises(IntegrityError):
        schedule = create_schedule('math.copysign', 1, -1,
                                   name='test math',
                                   hook='django_q.tests.tasks.result',
                                   schedule_type=Schedule.HOURLY,
                                   repeats=1)
    # run scheduler
    scheduler(broker=broker)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, broker=broker)
    assert task_queue.qsize() == 1
    assert broker.queue_size() == 0
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == 1
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # reload: the run must have decremented repeats and recorded success
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    assert schedule.next_run < arrow.get(timezone.now()).shift(hours=+1)
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    # copysign(1, -1) is negative
    assert task.result < 0
    # Once schedule with delete
    once_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                    2,
                                    word='django',
                                    schedule_type=Schedule.ONCE,
                                    repeats=-1,
                                    hook='django_q.tests.tasks.result'
                                    )
    assert hasattr(once_schedule, 'pk') is True
    # negative repeats
    always_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                      2,
                                      word='django',
                                      schedule_type=Schedule.DAILY,
                                      repeats=-1,
                                      hook='django_q.tests.tasks.result'
                                      )
    assert hasattr(always_schedule, 'pk') is True
    # Minute schedule
    minute_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                      2,
                                      word='django',
                                      schedule_type=Schedule.MINUTES,
                                      minutes=10)
    assert hasattr(minute_schedule, 'pk') is True
    # Cron schedule
    cron_schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                    2,
                                    word='django',
                                    schedule_type=Schedule.CRON,
                                    cron="0 22 * * 1-5")
    assert hasattr(cron_schedule, 'pk') is True
    assert cron_schedule.full_clean() is None
    assert cron_schedule.__str__() == 'django_q.tests.tasks.word_multiply'
    # invalid cron expression (day-of-week 1-12) must fail validation
    with pytest.raises(ValidationError):
        create_schedule('django_q.tests.tasks.word_multiply',
                        2,
                        word='django',
                        schedule_type=Schedule.CRON,
                        cron="0 22 * * 1-12")
    # All other types
    for t in Schedule.TYPE:
        if t[0] == Schedule.CRON:
            continue
        schedule = create_schedule('django_q.tests.tasks.word_multiply',
                                   2,
                                   word='django',
                                   schedule_type=t[0],
                                   repeats=1,
                                   hook='django_q.tests.tasks.result'
                                   )
        assert schedule is not None
        assert schedule.last_run() is None
        scheduler(broker=broker)
    # via model
    Schedule.objects.create(func='django_q.tests.tasks.word_multiply',
                            args='2',
                            kwargs='word="django"',
                            schedule_type=Schedule.DAILY
                            )
    # scheduler
    scheduler(broker=broker)
    # ONCE schedule should be deleted
    assert Schedule.objects.filter(pk=once_schedule.pk).exists() is False
    # Catch up On
    monkeypatch.setattr(Conf, 'CATCH_UP', True)
    now = timezone.now()
    schedule = create_schedule('django_q.tests.tasks.word_multiply',
                               2,
                               word='catch_up',
                               schedule_type=Schedule.HOURLY,
                               next_run=timezone.now() - timedelta(hours=12),
                               repeats=-1
                               )
    scheduler(broker=broker)
    schedule = Schedule.objects.get(pk=schedule.pk)
    # with catch up, next_run stays in the past until it has caught up
    assert schedule.next_run < now
    # Catch up off
    monkeypatch.setattr(Conf, 'CATCH_UP', False)
    scheduler(broker=broker)
    schedule = Schedule.objects.get(pk=schedule.pk)
    # without catch up, next_run jumps straight past now
    assert schedule.next_run > now
    # Done
    broker.delete_queue()
def test_async(r):
    """Legacy end-to-end cluster test against the old redis ``async``/``list_key`` API.

    NOTE(review): ``async`` became a reserved keyword in Python 3.7, so this
    block only parses on Python <= 3.6; newer django-q renames the call to
    ``async_task`` (see the ``broker``-based tests elsewhere in this file).
    """
    list_key = 'cluster_test:q'
    r.delete(list_key)
    a = async('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST,
              hook='django_q.tests.test_cluster.assert_result',
              list_key=list_key)
    b = async('django_q.tests.tasks.count_letters2', WordClass(),
              hook='django_q.tests.test_cluster.assert_result',
              list_key=list_key)
    # unknown argument
    c = async('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST,
              'oneargumentoomany',
              hook='django_q.tests.test_cluster.assert_bad_result',
              list_key=list_key)
    # unknown function
    d = async('django_q.tests.tasks.does_not_exist', WordClass(),
              hook='django_q.tests.test_cluster.assert_bad_result',
              list_key=list_key)
    # function without result
    e = async('django_q.tests.tasks.countdown', 100000, list_key=list_key)
    # function as instance
    f = async(multiply, 753, 2, hook=assert_result, list_key=list_key)
    # model as argument
    g = async('django_q.tests.tasks.get_task_name', Task(name='John'),
              list_key=list_key)
    # args and kwargs and broken hook
    h = async('django_q.tests.tasks.word_multiply', 2, word='django',
              hook='fail.me', list_key=list_key, redis=r)
    # check if everything has a task name
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    # run the cluster to execute the tasks
    task_count = 8
    assert r.llen(list_key) == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for i in range(task_count):
        pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert r.llen(list_key) == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c: too many arguments must fail
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d: unresolvable function must fail
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e: succeeds but returns nothing
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == 'John'
    # task h: a broken hook must not break the task itself
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    r.delete(list_key)
def test_async(broker, admin_user):
    """End-to-end pipeline test on a pluggable broker.

    Extends the basic async test with: wait-timeout behavior before any task
    has run, argument unpickling of a model instance, group result/count/delete
    APIs (both module-level and Task-method forms), passing options via
    ``q_options``, and the ``save=False`` opt-out.

    :param broker: broker fixture (queue backend under test).
    :param admin_user: Django admin user fixture, used as a pickled task arg.
    """
    # NOTE(review): `async` became a reserved keyword in Python 3.7; this is
    # the old django-q API name (later renamed `async_task`).
    broker.list_key = 'cluster_test:q'
    broker.delete_queue()
    a = async('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST,
              hook='django_q.tests.test_cluster.assert_result', broker=broker)
    b = async('django_q.tests.tasks.count_letters2', WordClass(),
              hook='django_q.tests.test_cluster.assert_result', broker=broker)
    # unknown argument
    c = async('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST, 'oneargumentoomany',
              hook='django_q.tests.test_cluster.assert_bad_result', broker=broker)
    # unknown function
    d = async('django_q.tests.tasks.does_not_exist', WordClass(),
              hook='django_q.tests.test_cluster.assert_bad_result', broker=broker)
    # function without result
    e = async('django_q.tests.tasks.countdown', 100000, broker=broker)
    # function as instance
    f = async(multiply, 753, 2, hook=assert_result, broker=broker)
    # model as argument
    g = async('django_q.tests.tasks.get_task_name', Task(name='John'), broker=broker)
    # args, kwargs, group and broken hook
    h = async('django_q.tests.tasks.word_multiply', 2, word='django', hook='fail.me',
              broker=broker)
    # args unpickle test
    j = async('django_q.tests.tasks.get_user_id', admin_user, broker=broker, group='test_j')
    # q_options and save opt_out test
    k = async('django_q.tests.tasks.get_user_id', admin_user,
              q_options={'broker': broker, 'group': 'test_k', 'save': False, 'timeout': 90})
    # check if everything has a task id
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    assert isinstance(j, str)
    assert isinstance(k, str)
    # run the cluster to execute the tasks
    task_count = 10
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for i in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    # test wait timeout -- nothing has been executed yet, so all waits miss
    assert result(j, wait=10) is None
    assert fetch(j, wait=10) is None
    assert result_group('test_j', wait=10) is None
    assert result_group('test_j', count=2, wait=10) is None
    assert fetch_group('test_j', wait=10) is None
    assert fetch_group('test_j', count=2, wait=10) is None
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('f', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == 'John'
    # task h
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    # task j -- the pickled admin_user round-trips, so result is its id
    result_j = fetch(j)
    assert result_j is not None
    assert result_j.success is True
    assert result_j.result == result_j.args[0].id
    # check fetch, result by name
    assert fetch(result_j.name) == result_j
    assert result(result_j.name) == result_j.result
    # groups
    assert result_group('test_j')[0] == result_j.result
    assert result_j.group_result()[0] == result_j.result
    assert result_group('test_j', failures=True)[0] == result_j.result
    assert result_j.group_result(failures=True)[0] == result_j.result
    assert fetch_group('test_j')[0].id == [result_j][0].id
    assert fetch_group('test_j', failures=False)[0].id == [result_j][0].id
    assert count_group('test_j') == 1
    assert result_j.group_count() == 1
    assert count_group('test_j', failures=True) == 0
    assert result_j.group_count(failures=True) == 0
    assert delete_group('test_j') == 1
    assert result_j.group_delete() == 0
    deleted_group = delete_group('test_j', tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    deleted_group = result_j.group_delete(tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    # task k should not have been saved
    assert fetch(k) is None
    assert fetch(k, 100) is None
    assert result(k, 100) is None
    broker.delete_queue()
def test_async(r, admin_user):
    """End-to-end pipeline test on Redis with groups and q_options.

    Like the basic async test, plus: a model instance unpickled as a task
    argument, group result/count/delete APIs, options passed via ``q_options``
    and the ``save=False`` opt-out.

    :param r: Redis connection fixture.
    :param admin_user: Django admin user fixture, used as a pickled task arg.
    """
    # NOTE(review): `async` became a reserved keyword in Python 3.7; this is
    # the old django-q API name (later renamed `async_task`).
    list_key = 'cluster_test:q'
    r.delete(list_key)
    a = async('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST,
              hook='django_q.tests.test_cluster.assert_result', list_key=list_key, redis=r)
    b = async('django_q.tests.tasks.count_letters2', WordClass(),
              hook='django_q.tests.test_cluster.assert_result', list_key=list_key, redis=r)
    # unknown argument
    c = async('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST, 'oneargumentoomany',
              hook='django_q.tests.test_cluster.assert_bad_result', list_key=list_key, redis=r)
    # unknown function
    d = async('django_q.tests.tasks.does_not_exist', WordClass(),
              hook='django_q.tests.test_cluster.assert_bad_result', list_key=list_key, redis=r)
    # function without result
    e = async('django_q.tests.tasks.countdown', 100000, list_key=list_key, redis=r)
    # function as instance
    f = async(multiply, 753, 2, hook=assert_result, list_key=list_key, redis=r)
    # model as argument
    g = async('django_q.tests.tasks.get_task_name', Task(name='John'), list_key=list_key, redis=r)
    # args, kwargs, group and broken hook
    h = async('django_q.tests.tasks.word_multiply', 2, word='django', hook='fail.me',
              list_key=list_key, redis=r)
    # args unpickle test
    j = async('django_q.tests.tasks.get_user_id', admin_user,
              list_key=list_key, group='test_j', redis=r)
    # q_options and save opt_out test
    k = async('django_q.tests.tasks.get_user_id', admin_user,
              q_options={'list_key': list_key, 'group': 'test_k', 'redis': r,
                         'save': False, 'timeout': 90})
    # check if everything has a task id
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    assert isinstance(j, str)
    assert isinstance(k, str)
    # run the cluster to execute the tasks
    task_count = 10
    assert queue_size(list_key=list_key, r=r) == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for i in range(task_count):
        pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert queue_size(list_key=list_key, r=r) == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('f', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == 'John'
    # task h
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    # task j -- the pickled admin_user round-trips, so result is its id
    result_j = fetch(j)
    assert result_j is not None
    assert result_j.success is True
    assert result_j.result == result_j.args[0].id
    # check fetch, result by name
    assert fetch(result_j.name) == result_j
    assert result(result_j.name) == result_j.result
    # groups
    assert result_group('test_j') == [result_j.result]
    assert result_group('test_j', failures=True) == [result_j.result]
    assert fetch_group('test_j')[0].id == [result_j][0].id
    assert fetch_group('test_j', failures=False)[0].id == [result_j][0].id
    assert count_group('test_j') == 1
    assert count_group('test_j', failures=True) == 0
    assert delete_group('test_j') == 1
    assert delete_group('test_j', tasks=True) is None
    # task k should not have been saved
    assert fetch(k) is None
    r.delete(list_key)
def test_scheduler(r):
    """Schedule lifecycle test on Redis.

    Creates an HOURLY schedule with one repeat, runs the scheduler and an
    inline cluster pass, then verifies the repeat countdown, last/next run
    bookkeeping and the task result. Also exercises ONCE deletion, negative
    (infinite) repeats, and every entry of ``Schedule.TYPE``.

    :param r: Redis connection fixture.
    """
    list_key = 'scheduler_test:q'
    r.delete(list_key)
    schedule = create_schedule('math.copysign', 1, -1,
                               name='test math',
                               hook='django_q.tests.tasks.result',
                               schedule_type=Schedule.HOURLY,
                               repeats=1)
    assert schedule.last_run() is None
    # run scheduler
    scheduler(list_key=list_key)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert task_queue.qsize() == 1
    assert r.llen(list_key) == 0
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == 1
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    assert schedule.next_run < arrow.get(timezone.now()).replace(hours=+1)
    # copysign(1, -1) == -1.0, so the stored result is negative
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    assert task.result < 0
    # Once schedule with delete
    once_schedule = create_schedule('django_q.tests.tasks.word_multiply', 2,
                                    word='django',
                                    schedule_type=Schedule.ONCE,
                                    repeats=-1,
                                    hook='django_q.tests.tasks.result')
    assert hasattr(once_schedule, 'pk') is True
    # negative repeats
    always_schedule = create_schedule('django_q.tests.tasks.word_multiply', 2,
                                      word='django',
                                      schedule_type=Schedule.DAILY,
                                      repeats=-1,
                                      hook='django_q.tests.tasks.result')
    assert hasattr(always_schedule, 'pk') is True
    # All other types
    # NOTE(review): loop nesting reconstructed from flattened source -- the
    # second scheduler() call is assumed to run once after the loop; confirm
    # against upstream django-q history.
    for t in Schedule.TYPE:
        schedule = create_schedule('django_q.tests.tasks.word_multiply', 2,
                                   word='django',
                                   schedule_type=t[0],
                                   repeats=1,
                                   hook='django_q.tests.tasks.result')
        assert schedule is not None
        assert schedule.last_run() is None
        scheduler(list_key=list_key)
    scheduler(list_key=list_key)
    # ONCE schedule should be deleted
    assert Schedule.objects.filter(pk=once_schedule.pk).exists() is False
    r.delete(list_key)
def query(request):
    """Execute a SQL query against an instance and return the result set as JSON.

    Flow: resolve the instance, validate parameters, run the engine's query
    check (bad-query / SELECT * guards), check query privileges (which may
    lower ``limit_num``), apply the LIMIT rewrite, run the query as an
    async_task bounded by ``max_execution_time``, optionally run data masking,
    log successful queries, and return ``{'status', 'msg', 'data'}`` as an
    ``application/json`` HttpResponse.

    :param request: POST with ``instance_name``, ``sql_content``, ``db_name``
        and optional ``limit_num``.
    :return: HttpResponse wrapping the JSON-serialized result dict.
    """
    instance_name = request.POST.get('instance_name')
    sql_content = request.POST.get('sql_content')
    db_name = request.POST.get('db_name')
    limit_num = int(request.POST.get('limit_num', 0))
    user = request.user
    result = {'status': 0, 'msg': 'ok', 'data': {}}
    try:
        instance = Instance.objects.get(instance_name=instance_name)
    except Instance.DoesNotExist:
        result['status'] = 1
        result['msg'] = '实例不存在'
        # BUG FIX: previously returned the bare dict, not an HttpResponse --
        # a Django view must return an HttpResponse, matching every other
        # early-return in this view.
        return HttpResponse(json.dumps(result), content_type='application/json')

    # Server-side parameter validation
    if None in [sql_content, db_name, instance_name, limit_num]:
        result['status'] = 1
        result['msg'] = '页面提交参数可能为空'
        return HttpResponse(json.dumps(result), content_type='application/json')

    try:
        config = SysConfig()
        # Pre-query checks: forbidden statements, statement splitting
        query_engine = get_engine(instance=instance)
        query_check_info = query_engine.query_check(db_name=db_name, sql=sql_content)
        if query_check_info.get('bad_query'):
            # The engine flagged this as a bad query
            result['status'] = 1
            result['msg'] = query_check_info.get('msg')
            return HttpResponse(json.dumps(result), content_type='application/json')
        if query_check_info.get('has_star') and config.get('disable_star') is True:
            # The engine found a SELECT * and the disable_star option is on
            result['status'] = 1
            result['msg'] = query_check_info.get('msg')
            return HttpResponse(json.dumps(result), content_type='application/json')
        sql_content = query_check_info['filtered_sql']

        # Privilege check; may also return a (lower) effective limit_num
        priv_check_info = query_priv_check(user, instance, db_name, sql_content, limit_num)
        if priv_check_info['status'] == 0:
            limit_num = priv_check_info['data']['limit_num']
            priv_check = priv_check_info['data']['priv_check']
        else:
            result['status'] = 1
            result['msg'] = priv_check_info['msg']
            return HttpResponse(json.dumps(result), content_type='application/json')

        # EXPLAIN statements get limit_num 0 (no LIMIT rewrite)
        limit_num = 0 if re.match(r"^explain", sql_content.lower()) else limit_num
        # Add a LIMIT clause to (or rewrite) the query
        sql_content = query_engine.filter_sql(sql=sql_content, limit_num=limit_num)

        # Run the query asynchronously with timeout=max_execution_time
        max_execution_time = int(config.get('max_execution_time', 60))
        query_task_id = async_task(query_engine.query, db_name=str(db_name), sql=sql_content,
                                   limit_num=limit_num, timeout=max_execution_time, cached=60)
        # Wait for the result; no result after max_execution_time means the
        # task will have been killed
        query_task = fetch(query_task_id, wait=max_execution_time * 1000, cached=True)
        if query_task:
            if query_task.success:
                query_result = query_task.result
                query_result.query_time = query_task.time_taken()
            else:
                query_result = ResultSet(full_sql=sql_content)
                query_result.error = query_task.result
        else:
            # Timed out; async_task closes the connection itself
            query_result = ResultSet(full_sql=sql_content)
            query_result.error = f'查询时间超过 {max_execution_time} 秒,已被主动终止,请优化语句或者联系管理员。'

        if query_result.error:
            # Query failed
            result['status'] = 1
            result['msg'] = query_result.error
        elif config.get('data_masking'):
            # Data masking: only applied to error-free result sets; behavior
            # on masking failure depends on the query_check option
            query_masking_task_id = async_task(query_engine.query_masking, db_name=db_name,
                                               sql=sql_content, resultset=query_result, cached=60)
            query_masking_task = fetch(query_masking_task_id, wait=60 * 1000, cached=True)
            if query_masking_task.success:
                masking_result = query_masking_task.result
                masking_result.mask_time = query_masking_task.time_taken()
                if masking_result.error:
                    # Masking failed
                    if config.get('query_check'):
                        # query_check on: refuse and return the error
                        result['status'] = 1
                        result['msg'] = masking_result.error
                    else:
                        # query_check off: return unmasked data, mark the
                        # privilege check as skipped
                        query_result.error = None
                        priv_check = False
                        result['data'] = query_result.__dict__
                else:
                    # Masking succeeded
                    result['data'] = masking_result.__dict__
            else:
                logger.error(
                    f'数据脱敏异常,查询语句:{sql_content}\n,错误信息:{traceback.format_exc()}')
                # Unexpected masking failure: same query_check-dependent policy
                if config.get('query_check'):
                    result['status'] = 1
                    result['msg'] = f'数据脱敏异常,请联系管理员,错误信息:{query_masking_task.result}'
                else:
                    query_result.error = None
                    priv_check = False
                    result['data'] = query_result.__dict__
        else:
            # No masking required
            result['data'] = query_result.__dict__

        # Only successful queries are written to the query log
        if not query_result.error:
            if int(limit_num) == 0:
                limit_num = int(query_result.affected_rows)
            else:
                limit_num = min(int(limit_num), int(query_result.affected_rows))
            query_log = QueryLog(username=user.username,
                                 user_display=user.display,
                                 db_name=db_name,
                                 instance_name=instance.instance_name,
                                 sqllog=sql_content,
                                 effect_row=limit_num,
                                 cost_time=query_result.query_time,
                                 priv_check=priv_check,
                                 hit_rule=query_result.mask_rule_hit,
                                 masking=query_result.is_masked)
            # Guard against the DB connection having timed out during the query
            try:
                query_log.save()
            except OperationalError:
                connection.close()
                query_log.save()
    except Exception as e:
        logger.error(f'查询异常报错,查询语句:{sql_content}\n,错误信息:{traceback.format_exc()}')
        result['status'] = 1
        result['msg'] = f'查询异常报错,错误信息:{e}'
        return HttpResponse(json.dumps(result), content_type='application/json')

    # Return the query result; fall back to latin1 when the payload cannot be
    # decoded (it may still render garbled, but the response succeeds)
    try:
        return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
                            content_type='application/json')
    except UnicodeDecodeError:
        return HttpResponse(json.dumps(result, default=str, bigint_as_string=True,
                                       encoding='latin1'),
                            content_type='application/json')
def test_scheduler(r):
    """Schedule lifecycle test on Redis (duplicate variant of the earlier
    test_scheduler in this file; differs only in original formatting).

    Creates an HOURLY schedule with one repeat, runs the scheduler and an
    inline cluster pass, then verifies the repeat countdown, last/next run
    bookkeeping and the task result. Also exercises ONCE deletion, negative
    (infinite) repeats, and every entry of ``Schedule.TYPE``.

    :param r: Redis connection fixture.
    """
    list_key = 'scheduler_test:q'
    r.delete(list_key)
    schedule = create_schedule('math.copysign', 1, -1,
                               name='test math',
                               hook='django_q.tests.tasks.result',
                               schedule_type=Schedule.HOURLY,
                               repeats=1)
    assert schedule.last_run() is None
    # run scheduler
    scheduler(list_key=list_key)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert task_queue.qsize() == 1
    assert r.llen(list_key) == 0
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == 1
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    assert schedule.next_run < arrow.get(timezone.now()).replace(hours=+1)
    # copysign(1, -1) == -1.0, so the stored result is negative
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    assert task.result < 0
    # Once schedule with delete
    once_schedule = create_schedule('django_q.tests.tasks.word_multiply', 2,
                                    word='django',
                                    schedule_type=Schedule.ONCE,
                                    repeats=-1,
                                    hook='django_q.tests.tasks.result')
    assert hasattr(once_schedule, 'pk') is True
    # negative repeats
    always_schedule = create_schedule('django_q.tests.tasks.word_multiply', 2,
                                      word='django',
                                      schedule_type=Schedule.DAILY,
                                      repeats=-1,
                                      hook='django_q.tests.tasks.result')
    assert hasattr(always_schedule, 'pk') is True
    # All other types
    # NOTE(review): loop nesting reconstructed from flattened source -- the
    # second scheduler() call is assumed to run once after the loop; confirm
    # against upstream django-q history.
    for t in Schedule.TYPE:
        schedule = create_schedule('django_q.tests.tasks.word_multiply', 2,
                                   word='django',
                                   schedule_type=t[0],
                                   repeats=1,
                                   hook='django_q.tests.tasks.result')
        assert schedule is not None
        assert schedule.last_run() is None
        scheduler(list_key=list_key)
    scheduler(list_key=list_key)
    # ONCE schedule should be deleted
    assert Schedule.objects.filter(pk=once_schedule.pk).exists() is False
    r.delete(list_key)
def test_scheduler(broker):
    """Schedule lifecycle test on a pluggable broker.

    Extends the Redis scheduler test with: a MINUTES-type schedule, a schedule
    created directly through the model manager, and the ``Conf.CATCH_UP``
    on/off behavior for a schedule whose next_run is 12 hours in the past.

    :param broker: broker fixture (queue backend under test).
    """
    broker.list_key = 'scheduler_test:q'
    broker.delete_queue()
    schedule = create_schedule('math.copysign', 1, -1,
                               name='test math',
                               hook='django_q.tests.tasks.result',
                               schedule_type=Schedule.HOURLY,
                               repeats=1)
    assert schedule.last_run() is None
    # run scheduler
    scheduler(broker=broker)
    # set up the workflow
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push it
    pusher(task_queue, stop_event, broker=broker)
    assert task_queue.qsize() == 1
    assert broker.queue_size() == 0
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == 1
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.repeats == 0
    assert schedule.last_run() is not None
    assert schedule.success() is True
    assert schedule.next_run < arrow.get(timezone.now()).replace(hours=+1)
    # copysign(1, -1) == -1.0, so the stored result is negative
    task = fetch(schedule.task)
    assert task is not None
    assert task.success is True
    assert task.result < 0
    # Once schedule with delete
    once_schedule = create_schedule('django_q.tests.tasks.word_multiply', 2,
                                    word='django',
                                    schedule_type=Schedule.ONCE,
                                    repeats=-1,
                                    hook='django_q.tests.tasks.result')
    assert hasattr(once_schedule, 'pk') is True
    # negative repeats
    always_schedule = create_schedule('django_q.tests.tasks.word_multiply', 2,
                                      word='django',
                                      schedule_type=Schedule.DAILY,
                                      repeats=-1,
                                      hook='django_q.tests.tasks.result')
    assert hasattr(always_schedule, 'pk') is True
    # Minute schedule
    minute_schedule = create_schedule('django_q.tests.tasks.word_multiply', 2,
                                      word='django',
                                      schedule_type=Schedule.MINUTES,
                                      minutes=10)
    assert hasattr(minute_schedule, 'pk') is True
    # All other types
    # NOTE(review): loop nesting reconstructed from flattened source -- the
    # via-model creation and final scheduler() call are assumed to run once
    # after the loop; confirm against upstream django-q history.
    for t in Schedule.TYPE:
        schedule = create_schedule('django_q.tests.tasks.word_multiply', 2,
                                   word='django',
                                   schedule_type=t[0],
                                   repeats=1,
                                   hook='django_q.tests.tasks.result')
        assert schedule is not None
        assert schedule.last_run() is None
        scheduler(broker=broker)
    # via model
    Schedule.objects.create(func='django_q.tests.tasks.word_multiply',
                            args='2',
                            kwargs='word="django"',
                            schedule_type=Schedule.DAILY)
    # scheduler
    scheduler(broker=broker)
    # ONCE schedule should be deleted
    assert Schedule.objects.filter(pk=once_schedule.pk).exists() is False
    # Catch up On
    Conf.CATCH_UP = True
    now = timezone.now()
    schedule = create_schedule('django_q.tests.tasks.word_multiply', 2,
                               word='catch_up',
                               schedule_type=Schedule.HOURLY,
                               next_run=timezone.now() - timedelta(hours=12),
                               repeats=-1)
    scheduler(broker=broker)
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.next_run < now
    # Catch up off
    Conf.CATCH_UP = False
    scheduler(broker=broker)
    schedule = Schedule.objects.get(pk=schedule.pk)
    assert schedule.next_run > now
    # Done
    broker.delete_queue()
def test_async(r):
    """End-to-end pipeline test on Redis (duplicate variant of the first
    test_async in this file; the original differs only in `async (` spacing).

    Queues eight tasks covering success, failure, no-result, instance-function,
    model-argument and broken-hook cases, drains the queue with an inline
    cluster, then verifies each stored result.

    :param r: Redis connection fixture.
    """
    # NOTE(review): `async` became a reserved keyword in Python 3.7; this is
    # the old django-q API name (later renamed `async_task`).
    list_key = 'cluster_test:q'
    r.delete(list_key)
    a = async('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST,
              hook='django_q.tests.test_cluster.assert_result', list_key=list_key)
    b = async('django_q.tests.tasks.count_letters2', WordClass(),
              hook='django_q.tests.test_cluster.assert_result', list_key=list_key)
    # unknown argument
    c = async('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST, 'oneargumentoomany',
              hook='django_q.tests.test_cluster.assert_bad_result', list_key=list_key)
    # unknown function
    d = async('django_q.tests.tasks.does_not_exist', WordClass(),
              hook='django_q.tests.test_cluster.assert_bad_result', list_key=list_key)
    # function without result
    e = async('django_q.tests.tasks.countdown', 100000, list_key=list_key)
    # function as instance
    f = async(multiply, 753, 2, hook=assert_result, list_key=list_key)
    # model as argument
    g = async('django_q.tests.tasks.get_task_name', Task(name='John'), list_key=list_key)
    # args and kwargs and broken hook
    h = async('django_q.tests.tasks.word_multiply', 2, word='django', hook='fail.me',
              list_key=list_key, redis=r)
    # check if everything has a task name
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    # run the cluster to execute the tasks
    task_count = 8
    assert r.llen(list_key) == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for i in range(task_count):
        pusher(task_queue, stop_event, list_key=list_key, r=r)
    assert r.llen(list_key) == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('b', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == 'John'
    # task h
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    r.delete(list_key)