def test_doc_job_return_doc_saved(mock_dir, client):
    rm = RedisManager()
    rm.add_to_queue(json.dumps({
        "data": [{"id": "AAAA-AAAA-0001-0001", "count": 1}],
        "version": "v0.5",
        "type": "doc",
        "job_id": "1234"
    }))
    client.get('/get_work?client_id=asdf')
    client.post("/return_doc", data={
        'file': open(PATH + "test_single_doc.zip", 'rb'),
        'json': json.dumps({
            'job_id': "1234",
            'type': 'doc',
            'client_id': "abcd",
            "version": "0.5"
        })
    })
    assert len(rm.get_all_items_in_queue()) == 0
    count = get_count_of_doc()
    assert count == 1
    shutil.rmtree(config.server_read_value('regulations path'))
    assert rm.does_job_exist_in_progress('1234') is False
def test_when_two_jobs_in_db_returned_by_get_work(client):
    rm = RedisManager()
    rm.add_to_queue(json.dumps({
        'job_id': '1234',
        'type': 'docs',
        'data': ['Url1'],
        'version': '0.5'
    }))
    rm.add_to_queue(json.dumps({
        'job_id': '3456',
        'type': 'docs',
        'data': ['Url2'],
        'version': '0.5'
    }))
    result = client.get('/get_work?client_id=asdf')
    result_two = client.get('/get_work?client_id=asdf')
    assert json.loads(result.data) == {
        'job_id': '1234',
        'type': 'docs',
        'data': ['Url1'],
        'version': '0.5'
    }
    assert json.loads(result_two.data) == {
        'job_id': '3456',
        'type': 'docs',
        'data': ['Url2'],
        'version': '0.5'
    }
    assert rm.does_job_exist_in_progress('1234')
    assert rm.does_job_exist_in_progress('3456')
def expire():
    """Periodically check whether any in-progress jobs have expired.

    Runs forever, scanning the in-progress jobs once per hour.
    """
    while True:
        RedisManager().find_expired()
        time.sleep(3600)
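Because expire() blocks forever, it has to run off the main server thread. The following is a minimal sketch of one way to start it, assuming it is launched from the server's entry point; the start_expire_daemon name and the thread name are illustrative, not part of the project.

import threading


def start_expire_daemon():
    # Hypothetical helper: run the blocking expire() loop on a daemon thread
    # so the Flask server keeps serving requests and the process can still
    # exit cleanly while the loop is sleeping.
    checker = threading.Thread(target=expire, daemon=True,
                               name='expire-checker')
    checker.start()
    return checker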
def make_database():
    r = RedisManager()
    r.delete_all()
    # Avoid shadowing the built-in name `list`
    item_one = json.dumps({"A": "a", "B": ["b", "c"]})
    item_two = json.dumps({"D": "d", "E": ["e", "f"]})
    item_three = json.dumps({"G": "g", "H": ["h", "i"]})
    r.add_to_queue(item_one)
    r.add_to_queue(item_two)
    r.add_to_progress(item_three)
    return r
def make_database(reset, lock):
    r = RedisManager(fakeredis.FakeRedis())
    r.delete_all()
    # Avoid shadowing the built-in name `list`
    item_one = json.dumps({"A": "a", "B": ["b", "c"]})
    item_two = json.dumps({"D": "d", "E": ["e", "f"]})
    item_three = json.dumps({"G": "g", "H": ["h", "i"]})
    r.add_to_queue(item_one)
    r.add_to_queue(item_two)
    r.add_to_queue(item_three)
    return r
def test_docs_job_return_multiple_doc_place_in_db_queue(client):
    rm = RedisManager()
    rm.add_to_queue(json.dumps({
        'data': ['Url1'],
        'version': 'v0.5',
        'type': 'docs',
        'job_id': '1234'
    }))
    result = client.get('/get_work?client_id=asdf')
    assert json.loads(result.data) == {
        'data': ['Url1'],
        'version': 'v0.5',
        'type': 'docs',
        'job_id': '1234'
    }
    assert rm.does_job_exist_in_progress('1234') is True
    client.post('/return_docs', data={
        'file': open(PATH + 'Archive.zip', 'rb'),
        'json': json.dumps({
            'job_id': '1234',
            'type': 'docs',
            'data': [[{'id': 'AHRQ_FRDOC_0001-0037', 'count': 1}],
                     [{'id': 'AHRQ_FRDOC_0002-0037', 'count': 2}]],
            'client_id': 'abcd',
            'version': '0.5'
        })
    })
    assert len(rm.get_all_items_in_queue()) == 2
    assert rm.does_job_exist_in_progress('1234') is False
def test_docs_job_return_1000_doc_place_in_db_queue_with_helper_method_and_1000_archive(client):
    rm = RedisManager()
    return_data = return_data_ids(1000)
    rm.add_to_queue(json.dumps({
        'data': ['Url1'],
        'version': 'v0.5',
        'type': 'docs',
        'job_id': '1234'
    }))
    result = client.get('/get_work?client_id=asdf')
    assert json.loads(result.data) == {
        'data': ['Url1'],
        'version': 'v0.5',
        'type': 'docs',
        'job_id': '1234'
    }
    assert rm.does_job_exist_in_progress('1234') is True
    client.post('/return_docs', data={
        'file': open(PATH + 'Big_Archive.zip', 'rb'),
        'json': json.dumps({
            'job_id': '1234',
            'type': 'docs',
            'data': json.loads(return_data),
            'client_id': 'abcd',
            'version': '0.5'
        })
    })
    assert len(rm.get_all_items_in_queue()) == 1000
    assert rm.does_job_exist_in_progress('1234') is False
def test_docs_job_return_multiple_doc_place_in_db_queue_with_files(client):
    rm = RedisManager()
    with open(PATH + 'return_data.txt', 'r') as file:
        return_data = file.read().replace('\n', '')
    rm.add_to_queue(json.dumps({
        'data': ['Url1'],
        'version': 'v0.5',
        'type': 'docs',
        'job_id': '1234'
    }))
    result = client.get('/get_work?client_id=asdf')
    assert json.loads(result.data) == {
        'data': ['Url1'],
        'version': 'v0.5',
        'type': 'docs',
        'job_id': '1234'
    }
    assert rm.does_job_exist_in_progress('1234') is True
    client.post('/return_docs', data={
        'file': open(PATH + 'Archive.zip', 'rb'),
        'json': json.dumps({
            'job_id': '1234',
            'type': 'docs',
            'data': json.loads(return_data),
            'client_id': 'abcd',
            'version': '0.5'
        })
    })
    assert len(rm.get_all_items_in_queue()) == 2
    assert rm.does_job_exist_in_progress('1234') is False
def emptydatabase():
    return RedisManager()
from mirrulations_server.redis_manager import RedisManager


def queue_check(r):
    return r.get_all_items_in_progress_no_lock(), \
        r.get_all_items_in_queue_no_lock()


if __name__ == '__main__':
    r = RedisManager()
    progress, queue = queue_check(r)
    print(progress)
    print(queue)
def make_database(reset, lock):
    r = RedisManager()
    r.delete_all()
    return r
def make_database(reset, lock):
    r = RedisManager(fakeredis.FakeRedis())
    r.delete_all()
    return r
import redis

from mirrulations_server.redis_manager import RedisManager


def queue_check(r):
    return r.get_all_items_in_progress_no_lock(), \
        r.get_all_items_in_queue_no_lock()


if __name__ == '__main__':
    r = RedisManager(redis.Redis())
    progress, queue = queue_check(r)
    print(progress)
    print(queue)
def emptydatabase():
    return RedisManager(fakeredis.FakeRedis())
def redis_server():
    return RedisManager(redis.Redis())
import json
import os
from ast import literal_eval

import fakeredis
import mock
import pytest
import requests_mock

import mirrulations_server.endpoints as endpoints
from mirrulations_server.redis_manager import RedisManager

PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                    "../test_files/mirrulations_files/filename.txt")
version = 'v1.3'

# Replace the real Redis connection with a fake one for all endpoint tests
endpoints.redis_server = mock.Mock(
    return_value=RedisManager(fakeredis.FakeRedis()))


@pytest.fixture
def mock_req():
    with requests_mock.Mocker() as m:
        yield m


@pytest.fixture
def client():
    endpoints.app.config['TESTING'] = True
    client = endpoints.app.test_client()
    yield client