def test_iter(broker):
    """Exercise async_iter over lists/tuples and the Iter helper class."""
    broker.purge_queue()
    broker.cache.clear()
    floor_args = [i for i in range(10)]
    copysign_args = [(1, -1), (2, -1), (3, -4), (5, 6)]
    floor_tuple = (1, 2, 3, 4, 5)
    floor_id = async_iter('math.floor', floor_args, sync=True)
    copysign_id = async_iter('math.copysign', copysign_args, sync=True)
    tuple_id = async_iter('math.floor', floor_tuple, sync=True)
    single_id = async_iter('math.floor', (1, ), sync=True)
    floor_result = result(floor_id)
    assert floor_result is not None
    floor_task = fetch(floor_id)
    assert floor_task.result == floor_result
    assert result(copysign_id) is not None
    assert result(tuple_id) is not None
    assert result(single_id)[0] == 1
    # test iter class
    iter_task = Iter('math.copysign', sync=True, cached=True)
    for pair in [(1, -1), (2, -1), (3, -4), (5, 6)]:
        iter_task.append(*pair)
    assert iter_task.started is False
    assert iter_task.length() == 4
    assert iter_task.run() is not None
    assert len(iter_task.result()) == 4
    assert len(iter_task.fetch().result) == 4
    # appending clears the previous result until run() is called again
    iter_task.append(1, -7)
    assert iter_task.result() is None
    iter_task.run()
    assert len(iter_task.result()) == 5
def test_iter(broker):
    """Verify iteration tasks via async_iter and the cached Iter wrapper."""
    broker.purge_queue()
    broker.cache.clear()
    ints = [i for i in range(10)]
    pairs = [(1, -1), (2, -1), (3, -4), (5, 6)]
    fixed = (1, 2, 3, 4, 5)
    tid_a = async_iter('math.floor', ints, sync=True)
    tid_b = async_iter('math.copysign', pairs, sync=True)
    tid_c = async_iter('math.floor', fixed, sync=True)
    tid_d = async_iter('math.floor', (1,), sync=True)
    res_a = result(tid_a)
    assert res_a is not None
    fetched = fetch(tid_a)
    assert fetched.result == res_a
    assert result(tid_b) is not None
    assert result(tid_c) is not None
    assert result(tid_d)[0] == 1
    # test iter class
    helper = Iter('math.copysign', sync=True, cached=True)
    helper.append(1, -1)
    helper.append(2, -1)
    helper.append(3, -4)
    helper.append(5, 6)
    assert helper.started is False
    assert helper.length() == 4
    assert helper.run() is not None
    assert len(helper.result()) == 4
    assert len(helper.fetch().result) == 4
    # a fresh append invalidates the result until the next run
    helper.append(1, -7)
    assert helper.result() is None
    helper.run()
    assert len(helper.result()) == 5
def test_cached(broker):
    """Run cached tasks through an inline pusher/worker/monitor cycle."""
    broker.purge_queue()
    broker.cache.clear()
    group = 'cache_test'
    # queue the tests: one standalone task, six group tasks, one iter
    task = async_task('math.copysign', 1, -1, cached=True, broker=broker)
    task_id = task['id']
    for _ in range(5):
        async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    # deliberately bad function path so the group records one failure
    async_task('math.popysign', 1, -1, cached=True, broker=broker, group=group)
    iter_id = async_iter('math.floor', [i for i in range(10)], cached=True)
    # nothing is processed yet, so every cached wait should time out to None
    assert result(task_id, wait=10, cached=True) is None
    assert fetch(task_id, wait=10, cached=True) is None
    assert result_group(group, wait=10, cached=True) is None
    assert result_group(group, count=2, wait=10, cached=True) is None
    assert fetch_group(group, wait=10, cached=True) is None
    assert fetch_group(group, count=2, wait=10, cached=True) is None
    # run a single inline cluster
    task_count = 17
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    for _ in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    result_queue = Queue()
    worker(task_queue, result_queue, Value('f', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # assert results
    assert result(task_id, wait=500, cached=True) == -1
    assert fetch(task_id, wait=500, cached=True).result == -1
    # make sure it's not in the db backend
    assert fetch(task_id) is None
    # assert group: six cached entries, exactly one of them a failure
    assert count_group(group, cached=True) == 6
    assert count_group(group, cached=True, failures=True) == 1
    assert result_group(group, cached=True) == [-1, -1, -1, -1, -1]
    assert len(result_group(group, cached=True, failures=True)) == 6
    assert len(fetch_group(group, cached=True)) == 6
    assert len(fetch_group(group, cached=True, failures=False)) == 5
    delete_group(group, cached=True)
    assert count_group(group, cached=True) is None
    delete_cached(task_id)
    assert result(task_id, cached=True) is None
    assert fetch(task_id, cached=True) is None
    # iter cached: the result lives only in the cache, not the db backend
    assert result(iter_id) is None
    assert result(iter_id, cached=True) is not None
    broker.cache.clear()
def test_cached(broker):
    """End-to-end check of cached single-task, group and iter results."""
    broker.purge_queue()
    broker.cache.clear()
    group_name = 'cache_test'
    # queue the tests: one standalone task plus a group of six,
    # the last of which uses a bad function path and will fail
    task_id = async_task('math.copysign', 1, -1, cached=True, broker=broker)
    group_funcs = ['math.copysign'] * 5 + ['math.popysign']
    for func in group_funcs:
        async_task(func, 1, -1, cached=True, broker=broker, group=group_name)
    iter_id = async_iter('math.floor', [i for i in range(10)], cached=True)
    # test wait on cache: nothing processed yet, so short waits time out
    assert result(task_id, wait=10, cached=True) is None
    assert fetch(task_id, wait=10, cached=True) is None
    assert result_group(group_name, wait=10, cached=True) is None
    assert result_group(group_name, count=2, wait=10, cached=True) is None
    assert fetch_group(group_name, wait=10, cached=True) is None
    assert fetch_group(group_name, count=2, wait=10, cached=True) is None
    # run a single inline cluster
    task_count = 17
    assert broker.queue_size() == task_count
    queue_in = Queue()
    halt = Event()
    halt.set()
    for _ in range(task_count):
        pusher(queue_in, halt, broker=broker)
    assert broker.queue_size() == 0
    assert queue_in.qsize() == task_count
    queue_in.put('STOP')
    queue_out = Queue()
    worker(queue_in, queue_out, Value('f', -1))
    assert queue_out.qsize() == task_count
    queue_out.put('STOP')
    monitor(queue_out)
    assert queue_out.qsize() == 0
    # assert results
    assert result(task_id, wait=500, cached=True) == -1
    assert fetch(task_id, wait=500, cached=True).result == -1
    # make sure it's not in the db backend
    assert fetch(task_id) is None
    # assert group: six cached entries, one failure among them
    assert count_group(group_name, cached=True) == 6
    assert count_group(group_name, cached=True, failures=True) == 1
    assert result_group(group_name, cached=True) == [-1, -1, -1, -1, -1]
    assert len(result_group(group_name, cached=True, failures=True)) == 6
    assert len(fetch_group(group_name, cached=True)) == 6
    assert len(fetch_group(group_name, cached=True, failures=False)) == 5
    delete_group(group_name, cached=True)
    assert count_group(group_name, cached=True) is None
    delete_cached(task_id)
    assert result(task_id, cached=True) is None
    assert fetch(task_id, cached=True) is None
    # iter cached: the result is available only from the cache
    assert result(iter_id) is None
    assert result(iter_id, cached=True) is not None
    broker.cache.clear()
def add_task_id(self, data, args):
    """Queue the structure search and stash its django_q task id.

    Calls async_iter to generate a task id and hand the structure search
    to the qcluster process; the session cache entry lets the user look
    the search up while they consider pressing the apply key.
    """
    task_id = async_iter('cbh_chem_api.tasks.get_structure_search_for_projects', args)
    data["id"] = task_id
    cache_key = "structure_search__%s" % task_id
    caches[settings.SESSION_CACHE_ALIAS].set(cache_key, data)
    return data
def parzen_async():
    """Fan parzen_estimation out over 100 window widths via async_iter.

    Draws a 2-D normal sample, builds one argument tuple per width,
    queues the whole batch as a cached iteration task and returns the
    cached result (waiting up to 10 seconds for it).
    """
    mean = numpy.array([0, 0])
    covariance = numpy.array([[1, 0], [0, 1]])
    sample = numpy.random.multivariate_normal(mean, covariance, 10000)
    widths = numpy.linspace(1.0, 1.2, 100)
    point = numpy.array([[0], [0]])
    # one (sample, point, width) tuple per task in the iteration
    task_args = [(sample, point, w) for w in widths]
    result_id = async_iter(parzen_estimation, task_args, cached=True)
    # return the cached result or timeout after 10 seconds
    return result(result_id, wait=10000, cached=True)
def add_task_id(self, data, args):
    """Start the structure search asynchronously and cache the payload.

    Stores the django_q task id produced by async_iter on *data* and
    writes the payload to the session cache so the search is retrievable
    while qcluster is still processing it.
    """
    data["id"] = async_iter(
        'cbh_chem_api.tasks.get_structure_search_for_projects',
        args,
    )
    session_cache = caches[settings.SESSION_CACHE_ALIAS]
    session_cache.set("structure_search__%s" % data["id"], data)
    return data
def save_related(self, request, form, formsets, change):
    """Save related objects, then queue header-photo fill tasks."""
    super().save_related(request, form, formsets, change)
    # hand the long articles to django_q so the admin save returns quickly
    pending = list(form.instance.articles_long.all())
    async_iter(Article.fill_header_photo_from_url, pending)
def save_related(self, request, form, formsets, change):
    """Save related objects, then queue image fill tasks for news items."""
    super().save_related(request, form, formsets, change)
    # queue the image fill for each long news entry via django_q
    entries = list(form.instance.news_long.all())
    async_iter(News.fill_img_from_url, entries)