def redis():
    """Pytest fixture: yield a fake Redis pre-seeded with snapshot data.

    Seeds package/target sets and profile hashes used by the snapshot tests.
    """
    fake = FakeStrictRedis()
    fake.sadd("packages-snapshot", "test1", "test2", "test3")
    fake.hmset("profiles-snapshot", {"8devices_carambola": "ramips/rt305x"})
    fake.sadd("targets-snapshot", "testtarget/testsubtarget")
    fake.hmset("profiles-snapshot", {"testprofile": "testtarget/testsubtarget"})
    yield fake
def __init__(self, db=0, charset='utf-8', errors='strict',
             decode_responses=False, connected=True, **kwargs):
    """Create a fake client whose storage is shared per 'port' kwarg.

    Clients constructed with the same ``port`` share one ``FakeServer`` and
    therefore one underlying data store; the real ``db`` number is forced to 0.
    """
    # NOTE: 'db' is deliberately overwritten with the port value; it is used
    # only as the registry key for the shared FakeServer below.
    db = int(kwargs.get("port", 0))
    if db not in port_2_fake_server:
        port_2_fake_server[db] = server = fakeredis.FakeServer()
    else:
        server = port_2_fake_server[db]
    if 'connection_pool' in kwargs:
        kwargs.pop('connection_pool')
    # In newer fakeredis, storage is keyed by the server object: passing the
    # same server yields the same data store.
    FakeStrictRedis.__init__(self, db=0, charset=charset, errors=errors,
                             decode_responses=decode_responses,
                             connected=connected, server=server, **kwargs)
class TestWeb(unittest.TestCase):
    """Tests loading and querying food-truck data stored in fake Redis."""

    def setUp(self):
        """Write a one-truck CSV fixture and load it into fake Redis."""
        self.f = "test_trucks.csv"
        with open(self.f, "w") as out:
            out.write("Applicant,Address,FoodItems,Status,Latitude,Longitude")
            out.write("\n")
            out.write("MyTruck,1234 Main St,asdf:fdsa,APPROVED,37.0,-122.0")
        self.lat = 37.0
        self.lon = -122.0
        self.rad = 100.0
        self.name = "MyTruck"
        self.red = FakeStrictRedis()
        load_trucks(self.f, self.red)

    def tearDown(self):
        """Remove the CSV fixture file."""
        os.remove(self.f)

    def test_load_trucks(self):
        """The loaded truck should appear in some leaf node of the index."""
        # Scan through all the keys, looking for leaf nodes with trucks
        for key in self.red.keys():
            if key == "root":
                continue  # otherwise get type error
            trucks = deser(self.red.get(key))[1]
            if trucks:  # If leaf node with truck list
                self.assertEqual(trucks[0].name, self.name)
                return
        self.assertTrue(False)  # If no trucks found

    def test_get_trucks(self):
        """A radius query around the fixture location should find the truck."""
        for truck in get_trucks(self.lat, self.lon, self.rad, self.red,
                                self.red.get("root")):
            self.assertEqual(truck.name, self.name)
            return
        self.assertTrue(False)  # If no trucks found
class AsyncTaskTest(TestCase):
    """Tests for delft3dgt celery tasks guarded by celery_once locks."""

    def setUp(self):
        # Route celery_once's redis backend to a fake in-memory instance.
        self.get_redis = patch('celery_once.backends.redis.get_redis')
        self.mocked_redis = self.get_redis.start()
        self.redis = FakeStrictRedis()
        self.mocked_redis.return_value = self.redis

    @patch('delft3dcontainermanager.tasks.call_command')
    def test_delft3dgt_pulse(self, mockCall):
        """
        Assert that de delft3dgt_pulse task calls the
        containersync_sceneupdate() only once.
        """
        delft3dgt_pulse.delay()
        # Set redis key with TTL 100 seconds from now
        # so subsequent tasks won't run
        self.redis.set('qo_delft3dcontainermanager.tasks.delft3dgt_pulse',
                       int(time()) + 100)
        delft3dgt_pulse.delay()
        delft3dgt_pulse.delay()
        mockCall.assert_called_with('containersync_sceneupdate')
        self.assertEqual(mockCall.call_count, 1)

    def tearDown(self):
        self.redis.flushall()
        self.get_redis.stop()
def redis():
    """Pytest fixture: yield a fake Redis seeded with per-target snapshot data.

    Populates a target-scoped package set, profile/mapping hashes, and the
    target set used by the snapshot tests.
    """
    fake = FakeStrictRedis()
    fake.sadd("packages-snapshot-testtarget/testsubtarget",
              "test1", "test2", "test3")
    fake.hmset("profiles-snapshot", {"testprofile": "testtarget/testsubtarget"})
    fake.hmset("mapping-snapshot", {"testvendor,testprofile": "testprofile"})
    fake.sadd("targets-snapshot", "testtarget/testsubtarget")
    yield fake
def before(self):
    """Set up an End resource backed by fake Redis with two seeded scores."""
    super(TestEnd, self).before()
    self.redis = FakeStrictRedis()
    # Positional (score, member) arguments — presumably the legacy
    # redis-py <3.0 zadd signature; TODO confirm fakeredis version.
    self.redis.zadd('game:1:scores', 100, 100)
    self.redis.zadd('game:1:scores', 110, 110)
    self.resource = End(self.redis)
    self.api.add_route('/end/{game_id}', self.resource)
    self.api.req_options.auto_parse_form_urlencoded = True
def setUp(self):
    """Create a store and a raw client sharing one FakeServer (same data)."""
    server = FakeServer()
    server.connected = True
    # Both the store and self.redis point at the same server, so test
    # assertions via self.redis observe the store's writes.
    self.store = RedisStore(prefix='prefix:', redis_class=FakeStrictRedis,
                            server=server)
    self.redis = FakeStrictRedis(server=server)
    super(RedisStoreTestCase, self).setUp()
def test_artefact_add_large() -> None:
    """Test adding large artefacts to a Redis store."""
    db = Redis()
    store = RedisStorage(db, block_size=8)
    art = _graph.variable_artefact(db, hash_t("1"), "file", cons.Encoding.blob)
    data = b"12345" * 100
    _graph.set_data(db, store, art.hash, data, _graph.ArtefactStatus.done)
    data2 = _graph.get_data(db, store, art)
    # 500 bytes split into 8-byte blocks -> ceil(500 / 8) == 63 list entries.
    assert db.llen(cons.join(cons.ARTEFACTS, art.hash, "data")) == 63
    # Round-trip must be lossless.
    assert data == data2
def setUp(self):
    """Create two scenes with three containers each, plus a fake Redis.

    The first scene mixes containers with and without docker ids; the second
    ("new") scene has all containers in the created/non-existent state.
    """
    self.scene = Scene.objects.create(
        name='Scene',
        phase=Scene.phases.new
    )
    self.container_1_1 = Container.objects.create(
        scene=self.scene,
        container_type='preprocess',
        docker_id='abcdefg'
    )
    self.container_1_0 = Container.objects.create(
        scene=self.scene,
        container_type='delft3d',
        docker_id=''
    )
    self.container_0_1 = Container.objects.create(
        scene=self.scene,
        container_type='process',
        docker_id='hijklmn'
    )
    # a freshly created Scene
    # NOTE(review): phase is 'fin' despite the comment — confirm intent.
    self.scene_new = Scene.objects.create(
        name='Scene_New',
        phase=Scene.phases.fin
    )
    self.container_1_1_new = Container.objects.create(
        scene=self.scene_new,
        container_type='preprocess',
        desired_state='created',
        docker_state='non-existent',
        docker_id=''
    )
    self.container_1_0_new = Container.objects.create(
        scene=self.scene_new,
        container_type='delft3d',
        desired_state='created',
        docker_state='non-existent',
        docker_id=''
    )
    self.container_0_1_new = Container.objects.create(
        scene=self.scene_new,
        container_type='process',
        desired_state='created',
        docker_state='non-existent',
        docker_id=''
    )
    # Route celery_once's redis backend to a fake in-memory instance.
    self.get_redis = patch('celery_once.backends.redis.get_redis')
    self.mocked_redis = self.get_redis.start()
    self.redis = FakeStrictRedis()
    self.mocked_redis.return_value = self.redis
def zrevrange(self, key, start=0, end=-1, withscores=False):
    """Return sorted-set members in descending score order.

    Delegates to ``FakeStrictRedis.zrange`` with ``desc=True``.
    """
    result = FakeStrictRedis.zrange(
        self, key, start, end, desc=True, withscores=withscores
    )
    return result
def test_retrieve_missing(tmpdir):
    """retrieve_missing should fetch one target file per daily period.

    Four dated source files exist; with a 1-day period starting 2019-03-01
    and a reference time of 2019-03-05, four repo files are expected.
    """
    source_root = tmpdir.mkdir("source")
    repo_root = tmpdir.mkdir("repo")
    # Create new source files
    (source_root.join("simple_2019-03-01.txt")).open("w").close()
    (source_root.join("simple_2019-03-02.txt")).open("w").close()
    (source_root.join("simple_2019-03-03.txt")).open("w").close()
    (source_root.join("simple_2019-03-04.txt")).open("w").close()
    p = Profile("foo", FilesystemDriver, dict(root=str(source_root)))
    r = Repository(
        "foo",
        period="1 days",
        start=dt.datetime(2019, 3, 1),
        profile=p,
        targets=dict(default="empty_{time:%Y-%m-%d}.dat"),
        configuration=dict(patterns=dict(
            default="simple_{time:%Y-%m-%d}.txt")),
    )
    ref_time = dt.datetime(2019, 3, 5)
    redis_conn = FakeStrictRedis()
    runner.retrieve_missing(repo_root, [r], redis_conn=redis_conn,
                            is_async=False, ref_time=ref_time)
    assert len(list(glob.glob(str(repo_root.join("foo/*.dat"))))) == 4
def init_app(self, app, **kwargs):
    """Attach a Redis client to the Flask app based on the REDIS_URL config.

    The special URL ``:fake:`` selects an in-memory FakeStrictRedis; any
    other URL is handed to ``Redis.from_url``. An already-created client
    is left untouched.
    """
    url = app.config.setdefault("REDIS_URL", "redis://localhost:6379/0")
    if self._redis_client is not None:
        return
    if url == ":fake:":
        self._redis_client = FakeStrictRedis()
    else:
        self._redis_client = Redis.from_url(url, **kwargs)
def __init__(self, old_redis_url, new_redis_url, dry_run=True,
             per_recording_list=False, s3_import=False, s3_root=None):
    """Set up a data migration between two Redis instances.

    In dry-run mode the destination is an in-memory FakeStrictRedis and
    ``redis.StrictRedis`` is monkey-patched globally so downstream code
    (e.g. CLIUserManager) also writes to fake storage.
    """
    self.old_redis = StrictRedis.from_url(old_redis_url, decode_responses=True)
    self.dry_run = dry_run
    self.per_recording_list = per_recording_list
    self.s3_import = s3_import
    if s3_import:
        # s3_root is required when importing from S3.
        assert (s3_root)
        import boto3
        self.s3_root = s3_root
        self.s3 = boto3.client('s3')
    else:
        self.s3_root = None
        self.s3 = None
    if self.dry_run:
        import redis
        # Global monkey-patch: all StrictRedis users get fakeredis.
        redis.StrictRedis = fakeredis.FakeStrictRedis
        self.redis = FakeStrictRedis.from_url(new_redis_url,
                                              decode_responses=True)
    else:
        self.redis = StrictRedis.from_url(new_redis_url,
                                          decode_responses=True)
    print('Redis Inited')
    self.cli = CLIUserManager(new_redis_url)
def test_record_param_user_coll_write_dupe_no_revisit(self):
    """With WriteDupePolicy a duplicate fetch writes a full record.

    After recording, the CDX index holds three entries whose mime types
    include two full json records and one revisit.
    """
    warc_path = to_path(self.root_dir + '/warcs/{user}/{coll}/')
    dedup_index = self._get_dedup_index(dupe_policy=WriteDupePolicy())
    writer = PerRecordWARCWriter(warc_path, dedup_index=dedup_index)
    recorder_app = RecorderApp(self.upstream_url, writer)
    # Fix: '&param' had been mojibake-collapsed to '¶m' (HTML entity
    # '&para;'), corrupting the recorder query parameters.
    resp = self._test_warc_write(
        recorder_app, 'httpbin.org', '/get?foo=bar',
        '&param.recorder.user=USER&param.recorder.coll=COLL')
    assert b'HTTP/1.1 200 OK' in resp.body
    assert b'"foo": "bar"' in resp.body
    self._test_all_warcs('/warcs/USER/COLL/', 3)
    r = FakeStrictRedis.from_url('redis://localhost/2')
    res = r.zrangebylex('USER:COLL:cdxj', '[org,httpbin)/', '(org,httpbin,')
    assert len(res) == 3
    mimes = [CDXObject(x)['mime'] for x in res]
    assert sorted(mimes) == ['application/json', 'application/json',
                             'warc/revisit']
    assert len(writer.fh_cache) == 0
def test_transaction_no_error(
    sentry_init, capture_events, DictionaryContaining  # noqa:N803
):
    """A successfully processed rq job emits a transaction event."""
    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
    events = capture_events()
    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)
    queue.enqueue(do_trick, "Maisey", trick="kangaroo")
    # burst=True: process queued jobs and exit instead of blocking.
    worker.work(burst=True)
    envelope = events[0]
    assert envelope["type"] == "transaction"
    assert envelope["contexts"]["trace"]["op"] == "rq.task"
    assert envelope["transaction"] == "tests.integrations.rq.test_rq.do_trick"
    assert envelope["extra"]["rq-job"] == DictionaryContaining({
        "args": ["Maisey"],
        "kwargs": {"trick": "kangaroo"},
        "func": "tests.integrations.rq.test_rq.do_trick",
        "description": "tests.integrations.rq.test_rq.do_trick('Maisey', trick='kangaroo')",
    })
def test_traces_sampler_gets_correct_values_in_sampling_context(
    sentry_init, DictionaryContaining, ObjectDescribedBy  # noqa:N803
):
    """The traces sampler receives the rq job in its sampling context."""
    traces_sampler = mock.Mock(return_value=True)
    sentry_init(integrations=[RqIntegration()], traces_sampler=traces_sampler)
    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)
    queue.enqueue(do_trick, "Bodhi", trick="roll over")
    # burst=True: process queued jobs and exit instead of blocking.
    worker.work(burst=True)
    traces_sampler.assert_any_call(
        DictionaryContaining({
            "rq_job": ObjectDescribedBy(
                type=rq.job.Job,
                attrs={
                    "description": "tests.integrations.rq.test_rq.do_trick('Bodhi', trick='roll over')",
                    "result": "Bodhi, can you roll over? Good dog!",
                    "func_name": "tests.integrations.rq.test_rq.do_trick",
                    "args": ("Bodhi", ),
                    "kwargs": {"trick": "roll over"},
                },
            ),
        }))
def app():
    """Pytest fixture: a Flask app with redis-backed sessions and test routes.

    Routes: '/' public, '/private' requires a session user_id, '/login'
    sets it, '/logout' clears the session.
    """
    app = Flask(__name__)
    redis = FakeStrictRedis()
    app.session_interface = RedisSessionInterface(redis)

    # pylint: disable=unused-variable
    @app.route("/")
    def index():
        return "Hello world", 200

    @app.route("/private")
    def private():
        if "user_id" not in session:
            return "Not OK", 403
        return "Hello world", 200

    @app.route("/login")
    def login():
        session["user_id"] = 123
        return "OK", 200

    @app.route("/logout")
    def logout():
        session.clear()
        return "OK", 200

    with app.app_context():
        yield app
def test_job_progress_included_in_json(self, uuid4Mock): """Tests that after posting to 'wordset_create', the response at 'wordset_create_progress json' contains attributes indicating the state of the job.""" # mocked Redis server conn = FakeStrictRedis() # replace redis connection with conn, replace the queue used by the view with a queue that uses conn # and immediately runs the task in the current thread with mock.patch.object(views, "rq_queue", new=Queue(is_async=False, connection=conn)): with mock.patch.object(views, "redis_cursor", new=conn): self.client.post('/words/wordset/create/', { 'name': 'test1', 'words': 'word\r\ntest' }) job = Job.fetch(self.job_id, connection=conn) logger.info(f'job {job}, connection {job.connection}') logger.info(f'status {job.get_status()}, meta {job.meta}') response = self.client.get( reverse("wordset_create_progress json", args=[self.job_id])) content = json.loads(response.content) self.assertIn('status', content) self.assertIn('potential_words', content) self.assertIn('processed_words', content) self.assertIn('recognized_words', content)
def test_can_count_queues_properly(self):
    """Proc quantity equals queued jobs plus one active job per queue."""
    try:
        loaded_procs.clear()
        # Put some jobs on the queue
        self._add_jobs_to_queue('high', 2)
        self._add_jobs_to_queue('bottom', 4)
        # Now fake a job being active for one of them
        for idx, queue_name in enumerate(['high', 'bottom']):
            queue = Queue(queue_name, connection=FakeStrictRedis())
            registry = StartedJobRegistry(queue_name, queue.connection)
            # Passing in a negative score is important here, otherwise the
            # job will be recognized as expired
            registry.connection.zadd(registry.key, -1,
                                     'job_id_{}'.format(idx))
        # Load the HF procs
        procs = load_procs(*(
            'tests.contrib.django.testapp.rq_test_procs.WorkerProc',
            'tests.contrib.django.testapp.rq_test_procs.AnotherWorkerProc'
        ))
        # Total should be all queued + 1 active for each
        assert sum([proc.quantity()
                    for proc_name, proc in procs.items()]) == 8
    finally:
        loaded_procs.clear()
def test_record_param_user_coll_skip(self):
    """With SkipDupePolicy a duplicate fetch writes no new WARC entries."""
    warc_path = to_path(self.root_dir + '/warcs/{user}/{coll}/')
    dedup_index = self._get_dedup_index(dupe_policy=SkipDupePolicy())
    recorder_app = RecorderApp(
        self.upstream_url,
        PerRecordWARCWriter(warc_path, dedup_index=dedup_index))
    # No new entries written
    self._test_all_warcs('/warcs/USER/COLL/', 2)
    # Fix: '&param' had been mojibake-collapsed to '¶m' (HTML entity
    # '&para;'), corrupting the recorder query parameters.
    resp = self._test_warc_write(
        recorder_app, 'httpbin.org', '/user-agent',
        '&param.recorder.user=USER&param.recorder.coll=COLL')
    assert '"user-agent": "{0}"'.format(UA) in resp.text
    self._test_all_warcs('/warcs/USER/COLL/', 2)
    # Test Redis CDX: still only the two pre-existing entries.
    r = FakeStrictRedis.from_url('redis://localhost/2')
    res = r.zrangebylex('USER:COLL:cdxj', '[org,httpbin)/', '(org,httpbin,')
    assert len(res) == 2
def test_record_param_user_coll_write_dupe_no_revisit(self):
    """With WriteDupePolicy a duplicate fetch writes a full record.

    After recording, the CDX index holds three entries: two full json
    records and one revisit.
    """
    warc_path = to_path(self.root_dir + '/warcs/{user}/{coll}/')
    dedup_index = self._get_dedup_index(dupe_policy=WriteDupePolicy())
    writer = PerRecordWARCWriter(warc_path, dedup_index=dedup_index)
    recorder_app = RecorderApp(self.upstream_url, writer)
    # Fix: '&param' had been mojibake-collapsed to '¶m' (HTML entity
    # '&para;'), corrupting the recorder query parameters.
    resp = self._test_warc_write(
        recorder_app, 'httpbin.org', '/get?foo=bar',
        '&param.recorder.user=USER&param.recorder.coll=COLL')
    assert b'HTTP/1.1 200 OK' in resp.body
    assert b'"foo": "bar"' in resp.body
    self._test_all_warcs('/warcs/USER/COLL/', 3)
    r = FakeStrictRedis.from_url('redis://localhost/2')
    res = r.zrangebylex('USER:COLL:cdxj', '[org,httpbin)/', '(org,httpbin,')
    assert len(res) == 3
    mimes = [CDXObject(x)['mime'] for x in res]
    assert sorted(mimes) == [
        'application/json', 'application/json', 'warc/revisit'
    ]
    assert len(writer.fh_cache) == 0
def test_record_video_metadata(self):
    """Recording a youtube-dl metadata link writes a metadata WARC record."""
    pytest.importorskip('youtube_dl')
    warc_path = to_path(self.root_dir + '/warcs/{user}/{coll}/')
    dedup_index = self._get_dedup_index()
    writer = PerRecordWARCWriter(warc_path, dedup_index=dedup_index)
    recorder_app = RecorderApp(self.upstream_url, writer)
    # NOTE(review): the user value appears redacted ('******') — confirm
    # the intended fixture value against the original test suite.
    params = {'param.recorder.user': '******',
              'param.recorder.coll': 'VIDEO',
              'content_type': 'application/vnd.youtube-dl_formats+json'
             }
    resp = self._test_warc_write(
        recorder_app, 'www.youtube.com', '/v/BfBgWtAIbRc',
        '&' + urlencode(params),
        link_url='metadata://www.youtube.com/v/BfBgWtAIbRc')
    r = FakeStrictRedis.from_url('redis://localhost/2')
    warcs = r.hgetall('USER:VIDEO:warc')
    assert len(warcs) == 1
    filename = list(warcs.values())[0]
    with open(filename, 'rb') as fh:
        decomp = DecompressingBufferedReader(fh)
        record = ArcWarcRecordLoader().parse_record_stream(decomp)
        status_headers = record.rec_headers
        assert status_headers.get_header('WARC-Type') == 'metadata'
        assert status_headers.get_header('Content-Type') == 'application/vnd.youtube-dl_formats+json'
        assert status_headers.get_header('WARC-Block-Digest') != ''
        # Metadata records carry no HTTP payload, so block == payload digest.
        assert status_headers.get_header('WARC-Block-Digest') == status_headers.get_header('WARC-Payload-Digest')
def test_record_param_user_coll(self):
    """First recording for USER/COLL creates one WARC and one CDX entry."""
    warc_path = to_path(self.root_dir + '/warcs/{user}/{coll}/')
    dedup_index = self._get_dedup_index()
    recorder_app = RecorderApp(
        self.upstream_url,
        PerRecordWARCWriter(warc_path, dedup_index=dedup_index))
    self._test_all_warcs('/warcs/USER/COLL/', None)
    # Fix: '&param' had been mojibake-collapsed to '¶m' (HTML entity
    # '&para;'), corrupting the recorder query parameters.
    resp = self._test_warc_write(
        recorder_app, 'httpbin.org', '/user-agent',
        '&param.recorder.user=USER&param.recorder.coll=COLL')
    assert '"user-agent": "{0}"'.format(UA) in resp.text
    self._test_all_warcs('/warcs/USER/COLL/', 1)
    r = FakeStrictRedis.from_url('redis://localhost/2')
    res = r.zrangebylex('USER:COLL:cdxj', '[org,httpbin)/', '(org,httpbin,')
    assert len(res) == 1
    cdx = CDXObject(res[0])
    assert cdx['urlkey'] == 'org,httpbin)/user-agent'
    assert cdx['mime'] == 'application/json'
    assert cdx['offset'] == '0'
    assert cdx['filename'].startswith(to_path('USER/COLL/'))
    assert cdx['filename'].endswith('.warc.gz')
    warcs = r.hgetall('USER:COLL:warc')
    full_path = to_path(self.root_dir + '/warcs/' + cdx['filename'])
    assert warcs == {cdx['filename'].encode('utf-8'):
                     full_path.encode('utf-8')}
def test_error_redis_file_not_found(self):
    """Missing WARC paths registered in redis produce 503 error responses."""
    f = FakeStrictRedis.from_url('redis://localhost/2')
    # Register a path that does not exist on disk.
    f.hset('test:warc', 'example2.warc.gz',
           './x-no-such-dir/example2.warc.gz')
    resp = self.testapp.get(
        '/allredis/resource?url=http://www.example.com/', status=503)
    assert resp.json[
        'message'] == "example2.warc.gz: [Errno 2] No such file or directory: './x-no-such-dir/example2.warc.gz'"
    # Removing the hash field changes the error to 'Archive File Not Found'.
    f.hdel('test:warc', 'example2.warc.gz')
    resp = self.testapp.get(
        '/allredis/resource?url=http://www.example.com/', status=503)
    assert resp.json == {
        'message': 'example2.warc.gz: Archive File Not Found',
        'errors': {
            'WARCPathLoader': 'example2.warc.gz: Archive File Not Found'
        }
    }
    # Deleting the whole key yields the same 'not found' error.
    f.delete('test:warc')
    resp = self.testapp.get(
        '/allredis/resource?url=http://www.example.com/', status=503)
    assert resp.json == {
        'message': 'example2.warc.gz: Archive File Not Found',
        'errors': {
            'WARCPathLoader': 'example2.warc.gz: Archive File Not Found'
        }
    }
def test_basic(sentry_init, capture_events):
    """A crashing rq job is reported to sentry with rq mechanism metadata."""
    sentry_init(integrations=[RqIntegration()])
    events = capture_events()
    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.SimpleWorker([queue], connection=queue.connection)
    queue.enqueue(crashing_job, foo=42)
    # burst=True: process queued jobs and exit instead of blocking.
    worker.work(burst=True)
    (event, ) = events
    (exception, ) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    assert exception["mechanism"]["type"] == "rq"
    assert exception["stacktrace"]["frames"][-1]["vars"]["foo"] == "42"
    assert event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
    assert event["extra"]["rq-job"] == {
        "args": [],
        "description": "tests.integrations.rq.test_rq.crashing_job(foo=42)",
        "func": "tests.integrations.rq.test_rq.crashing_job",
        # job_id is random, so compare it against itself.
        "job_id": event["extra"]["rq-job"]["job_id"],
        "kwargs": {"foo": 42},
    }
def test_coverage_summary_by_changeset(coverage_builds):
    """Coverage summaries return 202 while queued, then 200/500 after work."""
    from rq import Queue
    from codecoverage_backend import api
    from tests.conftest import mock_coverage_by_changeset_job_success

    # patch the queue to be sync to allow it run without workers.
    # http://python-rq.org/docs/testing/
    with mock.patch('codecoverage_backend.api.q',
                    Queue(connection=FakeStrictRedis())) as q:
        # patch the mock_coverage_by_changeset
        with mock.patch('codecoverage_backend.api.coverage_by_changeset_job',
                        mock_coverage_by_changeset_job_success):
            # Get changeset coverage information
            for changeset, expected in coverage_builds['summary'].items():
                result, code = api.coverage_summary_by_changeset(changeset)
                assert code == 202
            # test that in the case of exception it will return 500
            result, code = api.coverage_summary_by_changeset(
                'mozilla test changeset')
            assert code == 202
            # run simple worker to run all tasks
            w = SimpleWorker([q], connection=q.connection)
            w.work(burst=True)
            # Everything should be 200 now
            for changeset, expected in coverage_builds['summary'].items():
                result, code = api.coverage_summary_by_changeset(changeset)
                assert result == expected
                assert code == 200
            # except the incorrect changeset, should be 500
            result, code = api.coverage_summary_by_changeset(
                'mozilla test changeset')
            assert code == 500
def setup(self):
    """Point the app's RedBeat scheduler at a clean fakeredis instance."""
    key_prefix = 'rb-tests:'
    self.app.conf.add_defaults({
        'REDBEAT_KEY_PREFIX': key_prefix,
        'redbeat_key_prefix': key_prefix,
    })
    self.app.redbeat_redis = FakeStrictRedis(decode_responses=True)
    self.app.redbeat_redis.flushdb()
def __init__(self, host: str, port: int = 6379, db: int = 0):
    """Connect to Redis, substituting fakeredis when testing or host=='mock'."""
    if environ.env.config.get(ConfigKeys.TESTING, False) or host == 'mock':
        # Alias the fake client to the same name so construction is uniform.
        from fakeredis import FakeStrictRedis as Redis
    else:
        from redis import Redis
    self.redis = Redis(host=host, port=port, db=db)
def test_redis_warc_1(self):
    """A WARC registered in redis is served from the 'example' collection."""
    redis_cli = FakeStrictRedis.from_url('redis://localhost/2')
    redis_cli.hset('test:warc', 'example2.warc.gz',
                   TEST_WARC_PATH + 'example2.warc.gz')
    resp = self.testapp.get('/allredis/resource?url=http://www.example.com/')
    assert resp.headers['Warcserver-Source-Coll'] == 'example'
def scheduler(config: Dict[str, Any]) -> ChaosPlatformScheduler:
    """Pytest fixture: a scheduler on a synchronous fakeredis-backed queue.

    ``singleton=False`` gives this test its own isolated fake server;
    ``is_async=False`` makes jobs run inline without workers.
    """
    queue = create_scheduler_queue("myqueue", FakeStrictRedis(singleton=False),
                                   is_async=False)
    sched = create_scheduler(queue, config)
    # Short polling interval keeps the test fast.
    sched.interval = 1
    return sched
def test_transport_shutdown(sentry_init):
    """The worker flushes the sentry transport after processing a crash."""
    sentry_init(integrations=[RqIntegration()])

    # Unbuffered pipe: events are written as JSON lines, 'flush' as a marker.
    events_r, events_w = os.pipe()
    events_r = os.fdopen(events_r, "rb", 0)
    events_w = os.fdopen(events_w, "wb", 0)

    def capture_event(event):
        events_w.write(json.dumps(event).encode("utf-8"))
        events_w.write(b"\n")

    def flush(timeout=None, callback=None):
        events_w.write(b"flush\n")

    Hub.current.client.transport.capture_event = capture_event
    Hub.current.client.flush = flush

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.Worker([queue], connection=queue.connection)
    queue.enqueue(crashing_job, foo=42)
    worker.work(burst=True)

    event = events_r.readline()
    event = json.loads(event.decode("utf-8"))
    exception, = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
    # The flush marker must follow the captured event.
    assert events_r.readline() == b"flush\n"
def get_opts(use_redis=False):
    """Build handler options.

    With ``use_redis`` set, options carry a fake redis client and disable
    the in-memory fallback; otherwise only the fallback flag is enabled.
    """
    if not use_redis:
        return {"use_in_memory_on_failure": True}
    return {
        "rc": FakeStrictRedis(decode_responses=True),
        "use_in_memory_on_failure": False,
    }
def __init__(self, old_redis_url, new_redis_url, dry_run=True,
             per_recording_list=False, s3_import=False, s3_root=None):
    """Set up a data migration between two Redis instances.

    In dry-run mode the destination is an in-memory FakeStrictRedis and
    ``redis.StrictRedis`` is monkey-patched globally so downstream code
    (e.g. CLIUserManager) also writes to fake storage.
    """
    self.old_redis = StrictRedis.from_url(old_redis_url, decode_responses=True)
    self.dry_run = dry_run
    self.per_recording_list = per_recording_list
    self.s3_import = s3_import
    if s3_import:
        # s3_root is required when importing from S3.
        assert(s3_root)
        import boto3
        self.s3_root = s3_root
        self.s3 = boto3.client('s3')
    else:
        self.s3_root = None
        self.s3 = None
    if self.dry_run:
        import redis
        # Global monkey-patch: all StrictRedis users get fakeredis.
        redis.StrictRedis = fakeredis.FakeStrictRedis
        self.redis = FakeStrictRedis.from_url(new_redis_url,
                                              decode_responses=True)
    else:
        self.redis = StrictRedis.from_url(new_redis_url,
                                          decode_responses=True)
    print('Redis Inited')
    self.cli = CLIUserManager(new_redis_url)
def test_enqueue_task(self):
    """Enqueueing a descriptor pushes exactly one entry onto the queue."""
    broker = FakeStrictRedis()
    queue_id = 'fake_producer_%s' % uuid.uuid4().hex
    producer = ProducerQueue.from_broker(
        broker=broker,
        producer_queue_id=queue_id
    )
    descriptor = 'fake_descriptor_%s' % uuid.uuid4().hex
    producer.enqueue_task(
        descriptor=descriptor
    )
    assert broker.llen(queue_id) == 1
    popped = broker.lpop(queue_id)
    assert popped == descriptor
def test_record_custom_record(self):
    """PUT with put_record=resource writes a custom WARC resource record."""
    dedup_index = self._get_dedup_index(user=False)
    warc_path = to_path(self.root_dir + '/warcs/meta/meta.warc.gz')
    writer = MultiFileWARCWriter(warc_path, dedup_index=dedup_index)
    recorder_app = RecorderApp(self.upstream_url, writer)
    # Fix: '&param' had been mojibake-collapsed to '¶m' (HTML entity
    # '&para;'), corrupting the query string.
    req_url = ('/live/resource/postreq?url=custom://httpbin.org'
               '&param.recorder.coll=META&put_record=resource')
    buff = b'Some Data'
    testapp = webtest.TestApp(recorder_app)
    headers = {'content-type': 'text/plain',
               'WARC-Custom': 'foo'}
    resp = testapp.put(req_url, headers=headers, params=buff)
    assert resp.json['success'] == 'true'
    assert resp.json['WARC-Date'] != ''
    self._test_all_warcs('/warcs/meta', 1)
    r = FakeStrictRedis.from_url('redis://localhost/2')
    warcs = r.hgetall('META:warc')
    assert len(warcs) == 1
    warc_key = os.path.join('meta', 'meta.warc.gz').encode('utf-8')
    with open(warcs[warc_key], 'rb') as fh:
        decomp = DecompressingBufferedReader(fh)
        record = ArcWarcRecordLoader().parse_record_stream(
            decomp, ensure_http_headers=True)
        status_headers = record.rec_headers
        assert len(record.rec_headers.headers) == 9
        assert status_headers.get_header('WARC-Type') == 'resource'
        assert status_headers.get_header('WARC-Target-URI') == 'custom://httpbin.org'
        assert status_headers.get_header('WARC-Record-ID') != ''
        assert status_headers.get_header('WARC-Date') != ''
        assert status_headers.get_header('WARC-Block-Digest') != ''
        assert status_headers.get_header('WARC-Block-Digest') == status_headers.get_header('WARC-Payload-Digest')
        assert status_headers.get_header('Content-Type') == 'text/plain'
        assert status_headers.get_header('Content-Length') == str(len(buff))
        assert status_headers.get_header('WARC-Custom') == 'foo'
        assert record.raw_stream.read() == buff
        status_headers = record.http_headers
        assert len(record.http_headers.headers) == 2
        assert status_headers.get_header('Content-Type') == 'text/plain'
        assert status_headers.get_header('Content-Length') == str(len(buff))
    writer.close()
    assert len(writer.fh_cache) == 0
def setup_class(cls, redis_url='redis://localhost:6379/2'):
    """Patch redis.StrictRedis with a shared-pubsub fake for the class."""
    super(FakeRedisTests, cls).setup_class()
    # Reset module-level fake pubsub/database state between test classes.
    del PUBSUBS[:]
    DATABASES.clear()
    cls.redismock = patch('redis.StrictRedis', FakeStrictRedisSharedPubSub)
    cls.redismock.start()
    cls.redis = FakeStrictRedis.from_url(redis_url)
def test_delft3dgt_pulse(self, mockredis, mockcall):
    """
    Assert that de delft3dgt_pulse task calls the
    containersync_sceneupdate() only once.
    """
    delft3dgt_pulse.delay()
    # Fakeredis stores at module level
    fake = FakeStrictRedis()
    # Set redis key with TTL 100 seconds from now
    # so subsequent tasks won't run
    fake.set('qo_delft3dcontainermanager.tasks.delft3dgt_pulse',
             int(time()) + 100)
    delft3dgt_pulse.delay()
    delft3dgt_pulse.delay()
    mockcall.assert_called_with('containersync_sceneupdate')
    self.assertEqual(mockcall.call_count, 1)
def test_url_agnost(self):
    """URL-agnostic revisit resolves via its original record with Memento headers."""
    f = FakeStrictRedis.from_url('redis://localhost/2')
    f.hset('test:foo:warc', 'example-url-agnostic-revisit.warc.gz',
           TEST_WARC_PATH + 'example-url-agnostic-revisit.warc.gz')
    f.hset('test:foo:warc', 'example-url-agnostic-orig.warc.gz',
           TEST_WARC_PATH + 'example-url-agnostic-orig.warc.gz')
    # Fix: '&param' had been mojibake-collapsed to '¶m' (HTML entity
    # '&para;'), corrupting the query string.
    resp = self.testapp.get(
        '/urlagnost/resource?url=http://example.com/&param.arg=foo')
    assert resp.status_int == 200
    # NOTE(review): 'http://[email protected]/' looks like a redaction
    # artifact — confirm the original URL against the upstream test suite.
    assert resp.headers['Link'] == MementoUtils.make_link(
        'http://[email protected]/', 'original')
    assert resp.headers['Warcserver-Source-Coll'] == 'url-agnost'
    assert resp.headers['Memento-Datetime'] == 'Mon, 29 Jul 2013 19:51:51 GMT'
def setUp(self):
    """Write a one-truck CSV fixture and load it into fake Redis."""
    self.f = "test_trucks.csv"
    with open(self.f, "w") as out:
        out.write("Applicant,Address,FoodItems,Status,Latitude,Longitude")
        out.write("\n")
        out.write("MyTruck,1234 Main St,asdf:fdsa,APPROVED,37.0,-122.0")
    # Query parameters matching the fixture truck's location.
    self.lat = 37.0
    self.lon = -122.0
    self.rad = 100.0
    self.name = "MyTruck"
    self.red = FakeStrictRedis()
    load_trucks(self.f, self.red)
def test_record_param_user_coll_revisit(self):
    """A second fetch of the same URL is deduplicated into a revisit record."""
    warc_path = to_path(self.root_dir + '/warcs/{user}/{coll}/')
    dedup_index = self._get_dedup_index()
    recorder_app = RecorderApp(
        self.upstream_url,
        PerRecordWARCWriter(warc_path, dedup_index=dedup_index))
    self._test_all_warcs('/warcs/USER/COLL/', 1)
    # Fix: '&param' had been mojibake-collapsed to '¶m' (HTML entity
    # '&para;'), corrupting the recorder query parameters.
    resp = self._test_warc_write(
        recorder_app, 'httpbin.org', '/user-agent',
        '&param.recorder.user=USER&param.recorder.coll=COLL')
    assert '"user-agent": "{0}"'.format(UA) in resp.text
    self._test_all_warcs('/warcs/USER/COLL/', 2)
    # Test Redis CDX
    r = FakeStrictRedis.from_url('redis://localhost/2')
    res = r.zrangebylex('USER:COLL:cdxj', '[org,httpbin)/', '(org,httpbin,')
    assert len(res) == 2
    # The revisit entry may sort before or after the original.
    if b'warc/revisit' in res[0]:
        cdx = CDXObject(res[0])
    else:
        cdx = CDXObject(res[1])
    assert cdx['urlkey'] == 'org,httpbin)/user-agent'
    assert cdx['mime'] == 'warc/revisit'
    assert cdx['offset'] == '0'
    assert cdx['filename'].startswith(to_path('USER/COLL/'))
    assert cdx['filename'].endswith('.warc.gz')
    fullwarc = os.path.join(self.root_dir, 'warcs', cdx['filename'])
    warcs = r.hgetall('USER:COLL:warc')
    assert len(warcs) == 2
    assert warcs[cdx['filename'].encode('utf-8')] == fullwarc.encode('utf-8')
    with open(fullwarc, 'rb') as fh:
        decomp = DecompressingBufferedReader(fh)
        # Test refers-to headers
        status_headers = StatusAndHeadersParser(['WARC/1.0']).parse(decomp)
        assert status_headers.get_header('WARC-Type') == 'revisit'
        assert status_headers.get_header('WARC-Target-URI') == 'http://httpbin.org/user-agent'
        assert status_headers.get_header('WARC-Date') != ''
        assert status_headers.get_header('WARC-Refers-To-Target-URI') == 'http://httpbin.org/user-agent'
        assert status_headers.get_header('WARC-Refers-To-Date') != ''
def test_anon_auto_delete(self):
    """Anonymous user data and temp WARCs are cleaned up automatically."""
    sesh_redis = FakeStrictRedis.from_url('redis://localhost:6379/0')
    sesh_redis.flushdb()

    def assert_empty_keys():
        # Only the expected post-delete keys remain, and no temp WARC dirs.
        assert set(self.redis.keys()) == set(self.POST_DEL_KEYS)
        assert glob.glob(os.path.join(self.warcs_dir, 'temp$*')) == []

    # Poll until cleanup finishes (up to 10s).
    self.sleep_try(0.1, 10.0, assert_empty_keys)

    def assert_dir_delete():
        assert not os.path.isdir(os.path.join(self.warcs_dir, self.anon_user))

    self.sleep_try(0.1, 5.0, assert_dir_delete)
def setUp(self):
    """Stub the AlchemyAPI HTTP endpoint and patch StrictRedis with a fake."""
    self.alchemyapi_data = {}
    httpretty.enable()
    httpretty.register_uri(
        method=httpretty.POST,
        uri=re.compile(r'http://access.alchemyapi.com/calls/text/TextGetTargetedSentiment(\?.*)?'),
        body=self.fake_alchemy_api
    )
    # TODO: httpretty.HTTPretty.allow_net_connect = False
    self.redis = FakeStrictRedis()
    # bvs.database.StrictRedis() will now return the fake client.
    self.redis_patcher = patch('bvs.database.StrictRedis')
    self.redis_mock = self.redis_patcher.start()
    self.redis_mock.return_value = self.redis
def test_error_redis_file_not_found(self):
    """Missing WARC paths registered in redis produce 503 error responses."""
    f = FakeStrictRedis.from_url('redis://localhost/2')
    # Register a path that does not exist on disk.
    f.hset('test:warc', 'example2.warc.gz', './x-no-such-dir/example2.warc.gz')
    resp = self.testapp.get('/allredis/resource?url=http://www.example.com/',
                            status=503)
    assert resp.json['message'] == "example2.warc.gz: [Errno 2] No such file or directory: './x-no-such-dir/example2.warc.gz'"
    # Removing the hash field changes the error to 'Archive File Not Found'.
    f.hdel('test:warc', 'example2.warc.gz')
    resp = self.testapp.get('/allredis/resource?url=http://www.example.com/',
                            status=503)
    assert resp.json == {'message': 'example2.warc.gz: Archive File Not Found',
                         'errors': {'WARCPathLoader': 'example2.warc.gz: Archive File Not Found'}}
    # Deleting the whole key yields the same 'not found' error.
    f.delete('test:warc')
    resp = self.testapp.get('/allredis/resource?url=http://www.example.com/',
                            status=503)
    assert resp.json == {'message': 'example2.warc.gz: Archive File Not Found',
                         'errors': {'WARCPathLoader': 'example2.warc.gz: Archive File Not Found'}}
def test_record_param_user_coll_skip(self):
    """With SkipDupePolicy a duplicate fetch writes no new WARC entries."""
    warc_path = to_path(self.root_dir + '/warcs/{user}/{coll}/')
    dedup_index = self._get_dedup_index(dupe_policy=SkipDupePolicy())
    recorder_app = RecorderApp(
        self.upstream_url,
        PerRecordWARCWriter(warc_path, dedup_index=dedup_index))
    # No new entries written
    self._test_all_warcs('/warcs/', 2)
    # Fix: '&param' had been mojibake-collapsed to '¶m' (HTML entity
    # '&para;'), corrupting the recorder query parameters.
    resp = self._test_warc_write(
        recorder_app, 'httpbin.org', '/get?foo=bar',
        '&param.recorder.user=USER&param.recorder.coll=COLL')
    assert b'HTTP/1.1 200 OK' in resp.body
    assert b'"foo": "bar"' in resp.body
    self._test_all_warcs('/warcs/USER/COLL/', 2)
    # Test Redis CDX: still only the two pre-existing entries.
    r = FakeStrictRedis.from_url('redis://localhost/2')
    res = r.zrangebylex('USER:COLL:cdxj', '[org,httpbin)/', '(org,httpbin,')
    assert len(res) == 2
def add_cdx_to_redis(filename, key, redis_url='redis://localhost:6379/2'):
    """Load CDX lines from *filename* into a redis sorted set under *key*.

    Each line is stripped of trailing whitespace and added with score 0
    (positional score/member call, as in the original).
    """
    client = FakeStrictRedis.from_url(redis_url)
    with open(filename, 'rb') as cdx_file:
        for raw_line in cdx_file:
            client.zadd(key, 0, raw_line.rstrip())
def setup_class(cls, extra_config_file='test_no_invites_config.yaml',
                init_anon=True, **kwargs):
    """Class-level setup: env vars, patched config loader, fake redis, app.

    Creates record/storage dirs, points webrecorder at fake redis, patches
    load_wr_config with test key templates, and optionally starts the app
    and an anonymous user session.
    """
    super(BaseWRTests, cls).setup_class()
    cls.warcs_dir = to_path(cls.root_dir + '/warcs/')
    cls.storage_dir = os.path.join(to_path(cls.root_dir + '/storage/'))
    os.makedirs(cls.warcs_dir)
    os.environ['RECORD_ROOT'] = cls.warcs_dir
    os.environ['STORAGE_ROOT'] = cls.storage_dir
    cls.storage_today = os.path.join(cls.storage_dir, today_str())
    os.environ['WR_CONFIG'] = 'pkg://webrecorder/config/wr.yaml'
    if extra_config_file:
        os.environ['WR_USER_CONFIG'] = os.path.join(cls.get_curr_dir(),
                                                    extra_config_file)
    # NOTE(review): the following span appears truncated/garbled by
    # credential redaction (stray ')' and missing set_nx_env calls,
    # including the one that must set REDIS_SESSION_URL used below) —
    # reconstruct from the original source before running.
    os.environ['REDIS_BASE_URL'] = 'redis://*****:*****@localhost')
    cls.set_nx_env('EMAIL_SMTP_URL', 'smtp://[email protected]:test@localhost:25')
    cls.set_nx_env('NO_REMOTE_BROWSERS', '1')

    def load_wr_config():
        # Test override: inject deterministic stats/ref key templates.
        config = load_overlay_config('WR_CONFIG',
                                     'pkg://webrecorder/config/wr.yaml',
                                     'WR_USER_CONFIG', '')
        config['dyn_stats_key_templ'] = {
            'rec': 'r:{rec}:<sesh_id>:stats:',
            'coll': 'c:{coll}:<sesh_id>:stats:'
        }
        config['dyn_ref_templ'] = {
            'rec': 'r:{rec}:<sesh_id>:ref:',
            'coll': 'c:{coll}:<sesh_id>:ref:',
        }
        return config

    import webrecorder.maincontroller
    webrecorder.maincontroller.load_wr_config = load_wr_config

    cls.redis = FakeStrictRedis.from_url(os.environ['REDIS_BASE_URL'],
                                         decode_responses=True)
    cls.sesh_redis = FakeStrictRedis.from_url(os.environ['REDIS_SESSION_URL'],
                                              decode_responses=True)

    cls.custom_init(kwargs)

    if kwargs.get('no_app'):
        return

    cls.maincont = MainController()
    cls.testapp = webtest.TestApp(cls.maincont.app)

    if init_anon:
        res = cls.testapp.post('/api/v1/auth/anon_user')
        cls.anon_user = res.json['user']['username']
        cls.assert_temp_user_sesh(cls.anon_user)
    else:
        cls.anon_user = None
class RedisStoreTestCase(TestCase):
    """Tests for RedisStore backed by an in-memory FakeStrictRedis.

    All store keys are written under the 'prefix:' namespace; the tests
    verify both directions (store -> redis and redis -> store) using the
    store's own serialize() for value encoding.
    """

    def setUp(self):
        # Fresh fake server per test; flushed again in tearDown.
        self.redis = FakeStrictRedis()
        super(RedisStoreTestCase, self).setUp()

    def tearDown(self):
        flexmock_teardown()
        self.redis.flushdb()

    def test_get_returns_null_when_not_found(self):
        store = self.get_store()
        self.assertIsNone(store.get('foo'))

    def test_redis_value_is_returned(self):
        store = self.get_store()
        self.redis.set('prefix:foo', store.serialize('bar'))
        self.assertEqual('bar', store.get('foo'))

    def test_redis_value_is_returned_for_numerics(self):
        store = self.get_store()
        self.redis.set('prefix:foo', store.serialize(1))
        self.assertEqual(1, store.get('foo'))

    def test_put_value_into_redis(self):
        store = self.get_store()
        # put() takes the lifetime in minutes; the stored TTL is in seconds,
        # hence the ceil(ttl / 60) round-trip back to 60 minutes.
        store.put('foo', 'bar', 60)
        self.assertEqual(store.serialize('bar'), self.redis.get('prefix:foo'))
        self.assertEqual(60., round(math.ceil(float(self.redis.ttl('prefix:foo')) / 60)))

    def test_put_numeric_value_into_redis(self):
        store = self.get_store()
        store.put('foo', 1, 60)
        self.assertEqual(store.serialize(1), self.redis.get('prefix:foo'))
        self.assertEqual(60., round(math.ceil(float(self.redis.ttl('prefix:foo')) / 60)))

    def test_increment(self):
        store = self.get_store()
        self.redis.set('prefix:foo', 1)
        store.increment('foo', 2)
        self.assertEqual(3, int(self.redis.get('prefix:foo')))

    def test_decrement(self):
        store = self.get_store()
        self.redis.set('prefix:foo', 3)
        store.decrement('foo', 2)
        self.assertEqual(1, int(self.redis.get('prefix:foo')))

    def test_forever(self):
        store = self.get_store()
        store.forever('foo', 'bar')
        self.assertEqual(store.serialize('bar'), self.redis.get('prefix:foo'))
        # NOTE(review): relies on the old fakeredis behavior of ttl()
        # returning None for keys without expiry (modern clients return -1).
        self.assertIsNone(self.redis.ttl('prefix:foo'))

    def test_forget(self):
        store = self.get_store()
        self.redis.set('prefix:foo', 'bar')
        store.forget('foo')
        self.assertFalse(self.redis.exists('prefix:foo'))

    def get_store(self):
        # Store under test, namespaced and backed by the fake client class.
        return RedisStore(prefix='prefix:', redis_class=FakeStrictRedis)
def setUp(self):
    """Provision a fresh in-memory Redis stand-in before each test."""
    fake_client = FakeStrictRedis()
    self.redis = fake_client
    super(RedisStoreTestCase, self).setUp()
def set(self, key, value, expire=None, pexpire=None, only_if_not_exists=False, only_if_exists=False):
    """Adapter mapping descriptive keyword names onto redis-py's set().

    expire/pexpire become ex/px (seconds / milliseconds TTL) and the
    only_if_* flags become the nx/xx conditional-set switches.
    """
    options = {
        'ex': expire,
        'px': pexpire,
        'nx': only_if_not_exists,
        'xx': only_if_exists,
    }
    return FakeStrictRedis.set(self, key, value, **options)
def zrangebyscore(self, key, min='-inf', max='+inf', withscores=False, offset=None, count=None):
    """Adapter translating offset/count keywords to redis-py's start/num."""
    return FakeStrictRedis.zrangebyscore(
        self, key, min, max,
        start=offset,
        num=count,
        withscores=withscores,
    )
class OpinionTest(TestCase):
    """Tests for opinion() / notify_score() against a faked AlchemyAPI.

    httpretty intercepts the sentiment HTTP endpoint and serves
    fake_alchemy_api's canned JSON; bvs.database's StrictRedis is patched
    to a FakeStrictRedis instance shared with the assertions.
    """

    def fake_alchemy_api(self, request, uri, headers):
        """httpretty body callback: canned AlchemyAPI sentiment response.

        Per-test overrides registered via constraint_alchemy_api() are
        overlaid onto the base payload; a key like 'docSentiment_type'
        targets the nested dict, a plain key replaces a top-level entry.
        """
        data = {
            "status": "OK",
            "usage": "By accessing AlchemyAPI or using information generated by AlchemyAPI, you are agreeing to be bound by the AlchemyAPI Terms of Use: http://www.alchemyapi.com/company/terms.html",
            "totalTransactions": "1",
            "language": "english",
            "docSentiment": {
                "score": str(random.random()),
                "type": "positive"
            }
        }
        # NOTE(review): was dict.iteritems(), which is Python-2-only;
        # .items() iterates identically here and also runs on Python 3.
        for k, v in self.alchemyapi_data.items():
            keys = k.split('_')
            if len(keys) == 2:
                data[keys[0]][keys[1]] = v
            else:
                data[keys[0]] = v
        return (200, headers, json.dumps(data))

    def setUp(self):
        self.alchemyapi_data = {}
        httpretty.enable()
        httpretty.register_uri(
            method=httpretty.POST,
            uri=re.compile(r'http://access.alchemyapi.com/calls/text/TextGetTargetedSentiment(\?.*)?'),
            body=self.fake_alchemy_api
        )
        # TODO: httpretty.HTTPretty.allow_net_connect = False
        self.redis = FakeStrictRedis()
        self.redis_patcher = patch('bvs.database.StrictRedis')
        self.redis_mock = self.redis_patcher.start()
        self.redis_mock.return_value = self.redis

    def tearDown(self):
        httpretty.disable()
        self.redis_patcher.stop()

    def constraint_alchemy_api(self, **kwargs):
        """Override fields of the fake AlchemyAPI response for one test."""
        self.alchemyapi_data = kwargs

    def constraint_score(self, **kwargs):
        """Seed the score counters in redis, random except for overrides."""
        data = {
            'positive': random.randint(0, 100),
            'neutral': random.randint(0, 100),
            'negative': random.randint(0, 100),
            'error': random.randint(0, 100),
        }
        data.update(kwargs)
        # NOTE(review): was dict.iteritems() -- see fake_alchemy_api.
        for k, v in data.items():
            self.redis.set(k, v)

    def fixture_tweet(self):
        """A minimal tweet dict as opinion() expects it."""
        tweet = dict(
            id = 4564560,
            user = '******',
            picture = 'http://twitter.com/batman.jpg',
            msg = "It's fantastic BvS",
            timestamp = 1231231,
        )
        return tweet

    def test_simple_alchemy_call(self):
        # Smoke test: opinion() must not raise against the faked API.
        tweet = self.fixture_tweet()
        actual = opinion(tweet, 'any')
        # self.assertTrue(False)

    def test_positive_opinion(self):
        self.constraint_alchemy_api(docSentiment_type='positive')
        tweet = self.fixture_tweet()
        actual = opinion(tweet, 'any')
        self.assertEquals('positive', tweet['status'])

    def test_negative_opinion(self):
        self.constraint_alchemy_api(docSentiment_type='negative')
        tweet = self.fixture_tweet()
        actual = opinion(tweet, 'any')
        self.assertEquals('negative', tweet['status'])

    def test_score(self):
        # notify_score() should publish on the 'score' pubsub channel.
        self.constraint_score(positive=23)
        pubsub = self.redis.pubsub()
        pubsub.subscribe('score')
        notify_score()
        msg = pubsub.get_message()
        self.assertEquals(msg['channel'], 'score')
def setup_module():
    """Populate the shared fake-Redis CDX sorted set once for this module."""
    client = FakeStrictRedis.from_url('redis://localhost:6379/2')
    client.delete('test:rediscdx')
    with open('testdata/iana.cdxj', 'rb') as source:
        for entry in source:
            # score 0 for every member (old zadd signature): lexicographic order
            client.zadd('test:rediscdx', 0, entry.rstrip())
class TestEnd(_TestOnPost):
    """Tests for the End resource: POST /end/{game_id} finalizes a run.

    Each test starts with two pre-seeded leaderboard scores (100, 110) in
    'game:1:scores'; final scores land in the 'game:1:final' hash and the
    end timestamp in 'game:1:end'.
    """

    # noinspection PyAttributeOutsideInit
    def before(self):
        super(TestEnd, self).before()
        self.redis = FakeStrictRedis()
        # Old redis-py zadd signature: zadd(key, score, member).
        self.redis.zadd('game:1:scores', 100, 100)
        self.redis.zadd('game:1:scores', 110, 110)
        self.resource = End(self.redis)
        self.api.add_route('/end/{game_id}', self.resource)
        self.api.req_options.auto_parse_form_urlencoded = True

    def after(self):
        self.redis.flushall()
        del self.redis

    def test_without_uid(self):
        # Without a uid only the rank is returned -- no best_* fields.
        _run_id = 'some_random_thing'
        _score = 105
        query_string = urlencode({'run_id': _run_id, 'score': _score})
        body = self.simulate_request('/end/1', method='POST',
                                     query_string=query_string,
                                     decode='utf-8')
        self.assertEquals(falcon.HTTP_200, self.srmock.status)
        data = json.loads(body)
        self.assertIn('rank', data, 'rank has been set')
        self.assertNotIn('best_score', data, 'no best_score')
        self.assertNotIn('best_rank', data, 'no best_rank')
        self.assertEquals(1, data['rank'], 'rank should be 1')
        self.assertEquals(str(_score), self.redis.hget('game:1:final', _run_id))
        self.assertTrue(self.redis.hexists('game:1:end', _run_id),
                        'should record end time')

    def test_with_uid(self):
        # With a uid the response additionally carries best_score/best_rank.
        _run_id = 'some_random_thing'
        _score = 105
        _uid = 15
        query_string = urlencode({'run_id': _run_id, 'score': _score,
                                  'uid': _uid})
        body = self.simulate_request('/end/1', method='POST',
                                     query_string=query_string,
                                     decode='utf-8')
        self.assertEquals(falcon.HTTP_200, self.srmock.status)
        data = json.loads(body)
        self.assertIn('rank', data, 'rank has been set')
        self.assertIn('best_score', data, 'best_score has been set')
        self.assertIn('best_rank', data, 'best_rank has been set')
        self.assertEquals(1, data['rank'], 'rank should be 1')
        self.assertEquals(_score, data['best_score'],
                          'score should be set as best score')
        self.assertEquals(1, data['best_rank'], 'best_rank should be 1')
        self.assertEquals(str(_score), self.redis.hget('game:1:final', _run_id))
        self.assertTrue(self.redis.hexists('game:1:end', _run_id),
                        'should record end time')

    def test_update_best_records(self):
        # A new score above the stored personal best replaces both the
        # recorded best score and best rank.
        _run_id = 'some_random_thing'
        _score = 105
        _uid = 15
        query_string = urlencode({'run_id': _run_id, 'score': _score,
                                  'uid': _uid})
        self.redis.hset('game:1:record:scores', _uid, 100)
        self.redis.hset('game:1:record:ranks', _uid, 2)
        body = self.simulate_request('/end/1', method='POST',
                                     query_string=query_string,
                                     decode='utf-8')
        self.assertEquals(falcon.HTTP_200, self.srmock.status)
        data = json.loads(body)
        self.assertIn('rank', data, 'rank has been set')
        self.assertIn('best_score', data, 'best_score has been set')
        self.assertIn('best_rank', data, 'best_rank has been set')
        self.assertEquals(1, data['rank'], 'rank should be 1')
        self.assertEquals(_score, data['best_score'],
                          'score should be set as best score')
        self.assertEquals(1, data['best_rank'], 'best_rank should be 1')
        self.assertEquals(str(_score), self.redis.hget('game:1:final', _run_id))
        self.assertTrue(self.redis.hexists('game:1:end', _run_id),
                        'should record end time')

    def test_oot_update(self):
        # An existing higher record (150, rank 0) is NOT overwritten by a
        # lower new score; the stored best values are echoed back instead.
        _run_id = 'some_random_thing'
        _score = 105
        _uid = 15
        query_string = urlencode({'run_id': _run_id, 'score': _score,
                                  'uid': _uid})
        self.redis.hset('game:1:record:scores', _uid, 150)
        self.redis.hset('game:1:record:ranks', _uid, 0)
        body = self.simulate_request('/end/1', method='POST',
                                     query_string=query_string,
                                     decode='utf-8')
        self.assertEquals(falcon.HTTP_200, self.srmock.status)
        data = json.loads(body)
        self.assertIn('rank', data, 'rank has been set')
        self.assertIn('best_score', data, 'best_score has been set')
        self.assertIn('best_rank', data, 'best_rank has been set')
        self.assertEquals(1, data['rank'], 'rank should be 1')
        self.assertEquals(150, data['best_score'], 'score should be 150')
        self.assertEquals(0, data['best_rank'], 'best_rank should be 0')
        self.assertEquals(str(_score), self.redis.hget('game:1:final', _run_id))
        self.assertTrue(self.redis.hexists('game:1:end', _run_id),
                        'should record end time')
class TestStart(_TestOnPost):
    """Tests for the Start resource: POST /start/{game_id} opens a run.

    Covers anonymous starts, starts with an existing uid, and the
    userinfo+hash flow including hash collisions and re-use of an
    already-registered userinfo.
    """

    # noinspection PyAttributeOutsideInit
    def before(self):
        super(TestStart, self).before()
        self.redis = FakeStrictRedis()
        self.resource = Start(self.redis)
        self.api.add_route('/start/{game_id}', self.resource)

    def after(self):
        self.redis.flushall()
        del self.redis

    def test_without_userinfo(self):
        # Anonymous start: a run_id is issued but nothing is persisted.
        body = self.simulate_request('/start/1', decode='utf-8', method='POST')
        self.assertEquals(falcon.HTTP_200, self.srmock.status)
        data = json.loads(body)
        self.assertIn('run_id', data, 'run_id has been set')
        self.assertIsNone(self.redis.hget('game:1:run', data['run_id']),
                          'redis run hash should be empty')
        self.assertIsNone(self.redis.hget('game:1:start', data['run_id']),
                          'redis start hash should be empty')

    def test_with_uid(self):
        # An explicit uid wins over userinfo/hash and is echoed back;
        # the run->uid mapping and start time are persisted, userinfo is not.
        _uid = 25
        query_string = urlencode({'uid': _uid, 'userinfo': 'nothing',
                                  'hash': '1234'})
        body = self.simulate_request('/start/1', method='POST',
                                     query_string=query_string,
                                     decode='utf-8')
        self.assertEquals(falcon.HTTP_200, self.srmock.status)
        data = json.loads(body)
        self.assertIn('run_id', data, 'run_id has been set')
        self.assertIn('uid', data, 'uid has been set')
        self.assertEquals(_uid, data['uid'], 'use the old uid')
        self.assertEquals(str(data['uid']),
                          self.redis.hget('game:1:run', data['run_id']),
                          'store run_id and uid in redis')
        self.assertIsNone(self.redis.hget('game:1:userinfo', data['uid']),
                          'should not save userinfo')
        self.assertTrue(self.redis.hexists('game:1:start', data['run_id']),
                        'should record start time')

    def test_with_userinfo_only(self):
        # userinfo without its hash is rejected as a 400 / missing parameter.
        query_string = urlencode({'userinfo': json.dumps({'a': '1', 'b': '2'})})
        body = self.simulate_request('/start/1', method='POST',
                                     query_string=query_string,
                                     decode='utf-8')
        self.assertEquals(falcon.HTTP_400, self.srmock.status)
        data = json.loads(body)
        self.assertEquals('Missing parameter', data['title'])

    def test_with_userinfo_and_hash(self):
        # Fresh userinfo+hash: the hash becomes the uid and the userinfo
        # is stored under it.
        _hash = 105
        _userinfo = json.dumps({'field1': u'王思聪', 'field2': '15888888888'})
        query_string = urlencode({'userinfo': _userinfo,
                                  'hash': _hash})
        body = self.simulate_request('/start/1', method='POST',
                                     query_string=query_string,
                                     decode='utf-8')
        self.assertEquals(falcon.HTTP_200, self.srmock.status)
        data = json.loads(body)
        self.assertIn('run_id', data, 'run_id has been set')
        self.assertIn('uid', data, 'uid has been set')
        self.assertEquals(_hash, data['uid'], 'use the old uid')
        self.assertEquals(str(data['uid']),
                          self.redis.hget('game:1:run', data['run_id']),
                          'store run_id and uid in redis')
        self.assertEquals(_userinfo,
                          self.redis.hget('game:1:userinfo', data['uid']),
                          'store userinfo and uid in redis')
        self.assertTrue(self.redis.hexists('game:1:start', data['run_id']),
                        'should record start time')

    def test_with_userinfo_and_hash_collision(self):
        # Same hash but different stored userinfo: a new uid must be
        # generated instead of reusing the colliding hash.
        _hash = 105
        _userinfo = json.dumps({'field1': u'王思聪', 'field2': '15888888888'})
        query_string = urlencode({'userinfo': _userinfo,
                                  'hash': _hash})
        self.redis.hset('game:1:userinfo', _hash, 'something different')
        body = self.simulate_request('/start/1', method='POST',
                                     query_string=query_string,
                                     decode='utf-8')
        self.assertEquals(falcon.HTTP_200, self.srmock.status)
        data = json.loads(body)
        self.assertIn('run_id', data, 'run_id has been set')
        self.assertIn('uid', data, 'uid has been set')
        self.assertNotEquals(_hash, data['uid'],
                             'generate a new uid different from original hash')
        self.assertEquals(str(data['uid']),
                          self.redis.hget('game:1:run', data['run_id']),
                          'store run_id and uid in redis')
        self.assertEquals(_userinfo,
                          self.redis.hget('game:1:userinfo', data['uid']),
                          'store userinfo and uid in redis')
        self.assertTrue(self.redis.hexists('game:1:start', data['run_id']),
                        'should record start time')

    def test_with_used_userinfo_and_hash(self):
        # Identical userinfo already stored under the hash: the original
        # hash is reused as the uid.
        _hash = 105
        _userinfo = json.dumps({'field1': u'王思聪', 'field2': '15888888888'})
        query_string = urlencode({'userinfo': _userinfo,
                                  'hash': _hash})
        self.redis.hset('game:1:userinfo', _hash, _userinfo)
        body = self.simulate_request('/start/1', method='POST',
                                     query_string=query_string,
                                     decode='utf-8')
        self.assertEquals(falcon.HTTP_200, self.srmock.status)
        data = json.loads(body)
        self.assertIn('run_id', data, 'run_id has been set')
        self.assertIn('uid', data, 'uid has been set')
        self.assertEquals(_hash, data['uid'], 'use the original hash as uid')
        self.assertEquals(str(data['uid']),
                          self.redis.hget('game:1:run', data['run_id']),
                          'store run_id and uid in redis')
        self.assertEquals(_userinfo,
                          self.redis.hget('game:1:userinfo', data['uid']),
                          'store userinfo and uid in redis')
        self.assertTrue(self.redis.hexists('game:1:start', data['run_id']),
                        'should record start time')
def before(self):
    """Wire a Start resource backed by a fresh fake Redis into the API."""
    super(TestStart, self).before()
    fake_store = FakeStrictRedis()
    self.redis = fake_store
    self.resource = Start(fake_store)
    self.api.add_route('/start/{game_id}', self.resource)