class RedisDictTests(TestCase):
    """
    Tests for the RedisHashDict and RedisFlatDict containers
    """
    @mock.patch("redis.Redis", MockRedis)
    def setUp(self):
        """Build one hash-backed and one flat dict against a mocked Redis."""
        client = get_default_client()
        # Use arbitrary orc8r proto to test with
        self._hash_dict = RedisHashDict(client,
                                        "unittest",
                                        get_proto_serializer(),
                                        get_proto_deserializer(LogVerbosity))
        serde = RedisSerde('log_verbosity',
                           get_proto_serializer(),
                           get_proto_deserializer(LogVerbosity))
        self._flat_dict = RedisFlatDict(client, serde)

    @mock.patch("redis.Redis", MockRedis)
    def test_hash_insert(self):
        """Insert then overwrite a key; version must go 1 -> 2."""
        expected = LogVerbosity(verbosity=0)
        expected2 = LogVerbosity(verbosity=1)

        # insert proto
        self._hash_dict['key1'] = expected
        version = self._hash_dict.get_version("key1")
        actual = self._hash_dict['key1']
        self.assertEqual(1, version)
        self.assertEqual(expected, actual)

        # update proto
        self._hash_dict['key1'] = expected2
        version2 = self._hash_dict.get_version("key1")
        actual2 = self._hash_dict['key1']
        self.assertEqual(2, version2)
        self.assertEqual(expected2, actual2)

    @mock.patch("redis.Redis", MockRedis)
    def test_missing_version(self):
        """A key that was never written reports version 0."""
        missing_version = self._hash_dict.get_version("key2")
        self.assertEqual(0, missing_version)

    @mock.patch("redis.Redis", MockRedis)
    def test_hash_delete(self):
        """pop() removes the key; subsequent lookup raises KeyError."""
        expected = LogVerbosity(verbosity=2)
        self._hash_dict['key3'] = expected
        actual = self._hash_dict['key3']
        self.assertEqual(expected, actual)

        self._hash_dict.pop('key3')
        self.assertRaises(KeyError, self._hash_dict.__getitem__, 'key3')

    @mock.patch("redis.Redis", MockRedis)
    def test_flat_insert(self):
        """Flat dict insert/update mirrors hash-dict versioning semantics."""
        expected = LogVerbosity(verbosity=5)
        expected2 = LogVerbosity(verbosity=1)

        # insert proto
        self._flat_dict['key1'] = expected
        version = self._flat_dict.get_version("key1")
        actual = self._flat_dict['key1']
        self.assertEqual(1, version)
        self.assertEqual(expected, actual)

        # update proto
        self._flat_dict["key1"] = expected2
        version2 = self._flat_dict.get_version("key1")
        actual2 = self._flat_dict["key1"]
        # get() must agree with __getitem__ for present keys
        actual3 = self._flat_dict.get("key1")
        self.assertEqual(2, version2)
        self.assertEqual(expected2, actual2)
        self.assertEqual(expected2, actual3)

    @mock.patch("redis.Redis", MockRedis)
    def test_flat_missing_version(self):
        """A key that was never written reports version 0 in the flat dict."""
        missing_version = self._flat_dict.get_version("key2")
        self.assertEqual(0, missing_version)

    @mock.patch("redis.Redis", MockRedis)
    def test_flat_bad_key(self):
        """Keys containing ':' are rejected by set, get and delete."""
        expected = LogVerbosity(verbosity=2)
        self.assertRaises(ValueError,
                          self._flat_dict.__setitem__, 'bad:key', expected)
        self.assertRaises(ValueError,
                          self._flat_dict.__getitem__, 'bad:key')
        self.assertRaises(ValueError,
                          self._flat_dict.__delitem__, 'bad:key')

    @mock.patch("redis.Redis", MockRedis)
    def test_flat_delete(self):
        """del removes the key; __getitem__ raises and get() returns None."""
        expected = LogVerbosity(verbosity=2)
        self._flat_dict['key3'] = expected
        actual = self._flat_dict['key3']
        self.assertEqual(expected, actual)

        del self._flat_dict['key3']
        self.assertRaises(KeyError, self._flat_dict.__getitem__, 'key3')
        self.assertEqual(None, self._flat_dict.get('key3'))

    @mock.patch("redis.Redis", MockRedis)
    def test_flat_clear(self):
        """clear() empties the container."""
        expected = LogVerbosity(verbosity=2)
        self._flat_dict['key3'] = expected
        actual = self._flat_dict['key3']
        self.assertEqual(expected, actual)

        self._flat_dict.clear()
        self.assertEqual(0, len(self._flat_dict.keys()))

    @mock.patch("redis.Redis", MockRedis)
    def test_flat_garbage_methods(self):
        """Garbage lifecycle: mark, query, list, and delete garbage keys."""
        expected = LogVerbosity(verbosity=2)
        expected2 = LogVerbosity(verbosity=3)
        key = "k1"
        key2 = "k2"
        bad_key = "bad_key"
        self._flat_dict[key] = expected
        self._flat_dict[key2] = expected2

        # Marking k1 as garbage moves it out of keys() and get() visibility
        self._flat_dict.mark_as_garbage(key)
        is_garbage = self._flat_dict.is_garbage(key)
        self.assertTrue(is_garbage)
        is_garbage2 = self._flat_dict.is_garbage(key2)
        self.assertFalse(is_garbage2)

        self.assertEqual([key], self._flat_dict.garbage_keys())
        self.assertEqual([key2], self._flat_dict.keys())
        self.assertIsNone(self._flat_dict.get(key))
        self.assertEqual(expected2, self._flat_dict.get(key2))

        # delete_garbage only removes keys previously marked as garbage
        deleted = self._flat_dict.delete_garbage(key)
        not_deleted = self._flat_dict.delete_garbage(key2)
        self.assertTrue(deleted)
        self.assertFalse(not_deleted)
        self.assertIsNone(self._flat_dict.get(key))
        self.assertEqual(expected2, self._flat_dict.get(key2))

        # Garbage queries on unknown keys raise KeyError
        with self.assertRaises(KeyError):
            self._flat_dict.is_garbage(bad_key)
        with self.assertRaises(KeyError):
            self._flat_dict.mark_as_garbage(bad_key)
class GatewayDirectoryServiceRpcServicer(GatewayDirectoryServiceServicer):
    """
    gRPC based server for the Directoryd Gateway service.
    """

    def __init__(self):
        serde = RedisSerde(DIRECTORYD_REDIS_TYPE,
                           get_json_serializer(), get_json_deserializer())
        self._redis_dict = RedisFlatDict(get_default_client(), serde)

    def add_to_server(self, server):
        """ Add the servicer to a gRPC server """
        add_GatewayDirectoryServiceServicer_to_server(self, server)

    @return_void
    def UpdateRecord(self, request, context):
        """ Update the directory record of an object

        Args:
            request (UpdateRecordRequest): update record request
        """
        if len(request.id) == 0:
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            context.set_details("ID argument cannot be empty in "
                                "UpdateRecordRequest")
            return

        # Lock Redis for requested key until update is complete
        with self._redis_dict.lock(request.id):
            hwid = get_gateway_hwid()
            record = self._redis_dict.get(request.id) or \
                DirectoryRecord(location_history=[hwid], identifiers={})

            # Prepend this gateway's hwid unless it is already most recent
            if record.location_history[0] != hwid:
                record.location_history = [hwid] + record.location_history
            for field_key in request.fields:
                record.identifiers[field_key] = request.fields[field_key]

            # Truncate location history to the five most recent hwid's
            record.location_history = \
                record.location_history[:LOCATION_MAX_LEN]
            self._redis_dict[request.id] = record

    @return_void
    def DeleteRecord(self, request, context):
        """ Delete the directory record for an ID

        Args:
            request (DeleteRecordRequest): delete record request
        """
        if len(request.id) == 0:
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            context.set_details("ID argument cannot be empty in "
                                "DeleteRecordRequest")
            return

        # Lock Redis for requested key until delete is complete
        with self._redis_dict.lock(request.id):
            if request.id not in self._redis_dict:
                context.set_code(grpc.StatusCode.NOT_FOUND)
                context.set_details("Record for ID %s was not found."
                                    % request.id)
                return
            # Mark for deletion rather than deleting outright; the state
            # garbage collector reaps marked keys later
            self._redis_dict.mark_as_garbage(request.id)

    def GetDirectoryField(self, request, context):
        """ Get the directory record field for an ID and key

        Args:
            request (GetDirectoryFieldRequest): get directory field request

        Returns:
            DirectoryField: the requested field, or an empty DirectoryField
                with a non-OK status code set on the context on error
        """
        if len(request.id) == 0:
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            context.set_details("ID argument cannot be empty in "
                                "GetDirectoryFieldRequest")
            # BUGFIX: this handler has no @return_void, so a bare return
            # would hand None to grpc's response serializer; return an
            # empty message instead
            return DirectoryField()
        if len(request.field_key) == 0:
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            context.set_details("Field key argument cannot be empty in "
                                "GetDirectoryFieldRequest")
            return DirectoryField()

        # Lock Redis for requested key until get is complete
        with self._redis_dict.lock(request.id):
            if request.id not in self._redis_dict:
                context.set_code(grpc.StatusCode.NOT_FOUND)
                context.set_details("Record for ID %s was not found."
                                    % request.id)
                return DirectoryField()
            record = self._redis_dict[request.id]

        if request.field_key not in record.identifiers:
            context.set_code(grpc.StatusCode.NOT_FOUND)
            context.set_details("Field %s was not found in record for "
                                "ID %s" % (request.field_key, request.id))
            return DirectoryField()

        return DirectoryField(key=request.field_key,
                              value=record.identifiers[request.field_key])

    def GetAllDirectoryRecords(self, request, context):
        """ Get all directory records

        Args:
            request (Void): void

        Returns:
            AllDirectoryRecords: every live record currently in Redis
        """
        response = AllDirectoryRecords()
        for key in self._redis_dict.keys():
            with self._redis_dict.lock(key):
                # Lookup may produce an exception if the key has been deleted
                # between the call to __iter__ and lock
                try:
                    stored_record = self._redis_dict[key]
                except KeyError:
                    continue
                directory_record = response.records.add()
                directory_record.id = key
                directory_record.location_history[:] = \
                    stored_record.location_history
                for identifier_key in stored_record.identifiers:
                    directory_record.fields[identifier_key] = \
                        stored_record.identifiers[identifier_key]
        return response
class GarbageCollectorTests(TestCase):
    """Tests for the state GarbageCollector against a fake Redis and a
    dummy in-process gRPC state service."""

    def setUp(self):
        """Stand up fakeredis, an event loop, a dummy state RPC server,
        three RedisFlatDict clients and the GarbageCollector under test."""
        self.mock_redis = fakeredis.FakeStrictRedis()
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)
        service = MagicMock()
        service.config = {
            # Replicate arbitrary orc8r protos
            'state_protos': [{'proto_file': 'orc8r.protos.common_pb2',
                              'proto_msg': 'NetworkID',
                              'redis_key': NID_TYPE,
                              'state_scope': 'network'},
                             {'proto_file': 'orc8r.protos.service303_pb2',
                              'proto_msg': 'LogVerbosity',
                              'redis_key': LOG_TYPE,
                              'state_scope': 'gateway'},
                             ],
            'json_state': [{'redis_key': FOO_TYPE, 'state_scope': 'gateway'}]
        }
        service.loop = self.loop

        # Bind the rpc server to a free port
        self._rpc_server = grpc.server(
            futures.ThreadPoolExecutor(max_workers=10)
        )
        port = self._rpc_server.add_insecure_port('0.0.0.0:0')
        # Add the servicer
        self._servicer = DummyStateServer()
        self._servicer.add_to_server(self._rpc_server)
        self._rpc_server.start()
        # Create a rpc stub
        self.channel = grpc.insecure_channel('0.0.0.0:{}'.format(port))

        serde1 = RedisSerde(NID_TYPE,
                            get_proto_serializer(),
                            get_proto_deserializer(NetworkID))
        serde2 = RedisSerde(FOO_TYPE,
                            get_json_serializer(),
                            get_json_deserializer())
        serde3 = RedisSerde(LOG_TYPE,
                            get_proto_serializer(),
                            get_proto_deserializer(LogVerbosity))

        self.nid_client = RedisFlatDict(self.mock_redis, serde1)
        self.foo_client = RedisFlatDict(self.mock_redis, serde2)
        self.log_client = RedisFlatDict(self.mock_redis, serde3)

        # Set up and start garbage collecting loop
        grpc_client_manager = GRPCClientManager(
            service_name="state",
            service_stub=StateServiceStub,
            max_client_reuse=60,
        )

        # mock the get_default_client function used to return the same
        # fakeredis object
        func_mock = mock.MagicMock(return_value=self.mock_redis)
        with patch('magma.state.redis_dicts.get_default_client', func_mock):
            # Start state garbage collection loop
            self.garbage_collector = GarbageCollector(service,
                                                      grpc_client_manager)

    def tearDown(self):
        """Stop the dummy RPC server and close the dedicated event loop."""
        self._rpc_server.stop(None)
        self.loop.close()

    @mock.patch('snowflake.snowflake', get_mock_snowflake)
    def test_collect_states_to_delete(self):
        """Only keys marked as garbage are collected; device IDs carry the
        snowflake prefix for gateway-scoped (FOO_TYPE) state."""
        async def test():
            # Ensure setup is initialized properly
            self.nid_client.clear()
            self.foo_client.clear()
            self.log_client.clear()

            key = 'id1'
            self.nid_client[key] = NetworkID(id='foo')
            self.foo_client[key] = Foo("boo", 3)
            # Nothing is marked as garbage yet -> nothing to delete
            req = await self.garbage_collector._collect_states_to_delete()
            self.assertIsNone(req)
            self.nid_client.mark_as_garbage(key)
            self.foo_client.mark_as_garbage(key)
            req = await self.garbage_collector._collect_states_to_delete()
            self.assertEqual(2, len(req.ids))
            for state_id in req.ids:
                if state_id.type == NID_TYPE:
                    self.assertEqual('id1', state_id.deviceID)
                elif state_id.type == FOO_TYPE:
                    # gateway-scoped state is keyed by snowflake:id
                    self.assertEqual('aaa-bbb:id1', state_id.deviceID)
                else:
                    self.fail("Unknown state type %s" % state_id.type)

            # Cleanup
            del self.foo_client[key]
            del self.nid_client[key]

        self.loop.run_until_complete(test())

    @mock.patch('snowflake.snowflake', get_mock_snowflake)
    @mock.patch('magma.magmad.state_reporter.ServiceRegistry.get_rpc_channel')
    def test_garbage_collect_success(self, get_rpc_mock):
        """After a successful RPC, garbage keys are purged from Redis."""
        async def test():
            get_rpc_mock.return_value = self.channel
            self.nid_client.clear()
            self.foo_client.clear()
            self.log_client.clear()

            key = 'id1'
            foo = Foo("boo", 4)
            self.nid_client[key] = NetworkID(id='foo')
            self.foo_client[key] = foo
            self.nid_client.mark_as_garbage(key)
            self.foo_client.mark_as_garbage(key)
            req = await self.garbage_collector._collect_states_to_delete()
            self.assertEqual(2, len(req.ids))

            # Ensure all garbage collected objects get deleted from Redis
            await self.garbage_collector._send_to_state_service(req)
            self.assertEqual(0, len(self.nid_client.keys()))
            self.assertEqual(0, len(self.foo_client.keys()))
            self.assertEqual(0, len(self.nid_client.garbage_keys()))
            self.assertEqual(0, len(self.foo_client.garbage_keys()))

        self.loop.run_until_complete(test())

    @mock.patch('snowflake.snowflake', get_mock_snowflake)
    @mock.patch('magma.magmad.state_reporter.ServiceRegistry.get_rpc_channel')
    def test_garbage_collect_rpc_failure(self, get_rpc_mock):
        """If the RPC fails, garbage keys stay in Redis for a later retry."""
        async def test():
            get_rpc_mock.return_value = self.channel
            self.nid_client.clear()
            self.foo_client.clear()
            self.log_client.clear()

            key = 'id1'
            self.nid_client[key] = NetworkID(id='foo')
            self.log_client[key] = LogVerbosity(verbosity=3)
            self.nid_client.mark_as_garbage(key)
            self.log_client.mark_as_garbage(key)
            req = await self.garbage_collector._collect_states_to_delete()
            self.assertEqual(2, len(req.ids))

            # Ensure objects are NOT deleted from Redis on RPC failure:
            # they remain listed as garbage (not as live keys)
            await self.garbage_collector._send_to_state_service(req)
            self.assertEqual(0, len(self.nid_client.keys()))
            self.assertEqual(0, len(self.log_client.keys()))
            self.assertEqual(1, len(self.nid_client.garbage_keys()))
            self.assertEqual(1, len(self.log_client.garbage_keys()))

            # Cleanup
            del self.log_client[key]
            del self.nid_client[key]

        self.loop.run_until_complete(test())

    @mock.patch('snowflake.snowflake', get_mock_snowflake)
    @mock.patch('magma.magmad.state_reporter.ServiceRegistry.get_rpc_channel')
    def test_garbage_collect_with_state_update(self, get_rpc_mock):
        """State re-written after being marked as garbage must survive the
        garbage-collection RPC round trip."""
        async def test():
            get_rpc_mock.return_value = self.channel
            self.nid_client.clear()
            self.foo_client.clear()
            self.log_client.clear()

            key = 'id1'
            foo = Foo("boo", 4)
            self.nid_client[key] = NetworkID(id='foo')
            self.foo_client[key] = foo
            self.nid_client.mark_as_garbage(key)
            self.foo_client.mark_as_garbage(key)
            req = await self.garbage_collector._collect_states_to_delete()
            self.assertEqual(2, len(req.ids))

            # Update one of the states, to ensure we don't delete valid state
            # from Redis
            expected = NetworkID(id='bar')
            self.nid_client[key] = expected

            # Ensure all garbage collected objects get deleted from Redis
            await self.garbage_collector._send_to_state_service(req)
            self.assertEqual(1, len(self.nid_client.keys()))
            self.assertEqual(0, len(self.foo_client.keys()))
            self.assertEqual(0, len(self.nid_client.garbage_keys()))
            self.assertEqual(0, len(self.foo_client.garbage_keys()))
            self.assertEqual(expected, self.nid_client[key])

        self.loop.run_until_complete(test())
class GatewayDirectoryServiceRpcServicer(GatewayDirectoryServiceServicer):
    """gRPC based server for the Directoryd Gateway service"""

    def __init__(self, print_grpc_payload: bool = False):
        """Initialize Directoryd grpc endpoints."""
        serde = RedisSerde(DIRECTORYD_REDIS_TYPE,
                           get_json_serializer(), get_json_deserializer())
        self._redis_dict = RedisFlatDict(get_default_client(), serde)
        self._print_grpc_payload = print_grpc_payload

        if self._print_grpc_payload:
            logging.info("Printing GRPC messages")

    def add_to_server(self, server):
        """ Add the servicer to a gRPC server """
        add_GatewayDirectoryServiceServicer_to_server(self, server)

    @return_void
    def UpdateRecord(self, request, context):
        """ Update the directory record of an object

        Args:
            request (UpdateRecordRequest): update record request
        """
        logging.debug("UpdateRecord request received")
        self._print_grpc(request)
        if len(request.id) == 0:
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            context.set_details("ID argument cannot be empty in "
                                "UpdateRecordRequest")
            return

        try:
            # Lock Redis for requested key until update is complete
            with self._redis_dict.lock(request.id):
                hwid = get_gateway_hwid()
                record = self._redis_dict.get(request.id) or \
                    DirectoryRecord(location_history=[hwid], identifiers={})

                # Prepend this gateway's hwid unless it is already most recent
                if record.location_history[0] != hwid:
                    record.location_history = [hwid] + record.location_history
                for field_key in request.fields:
                    record.identifiers[field_key] = request.fields[field_key]

                # Truncate location history to the five most recent hwid's
                record.location_history = \
                    record.location_history[:LOCATION_MAX_LEN]
                self._redis_dict[request.id] = record
        except (RedisError, LockError) as e:
            logging.error(e)
            context.set_code(grpc.StatusCode.UNAVAILABLE)
            context.set_details("Could not connect to redis: %s" % e)

    @return_void
    def DeleteRecord(self, request, context):
        """ Delete the directory record for an ID

        Args:
            request (DeleteRecordRequest): delete record request
        """
        logging.debug("DeleteRecord request received")
        self._print_grpc(request)
        if len(request.id) == 0:
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            context.set_details("ID argument cannot be empty in "
                                "DeleteRecordRequest")
            return

        # Lock Redis for requested key until delete is complete
        try:
            with self._redis_dict.lock(request.id):
                if request.id not in self._redis_dict:
                    context.set_code(grpc.StatusCode.NOT_FOUND)
                    context.set_details("Record for ID %s was not found."
                                        % request.id)
                    return
                # Mark for deletion; the state garbage collector reaps
                # marked keys later
                self._redis_dict.mark_as_garbage(request.id)
        except (RedisError, LockError) as e:
            logging.error(e)
            context.set_code(grpc.StatusCode.UNAVAILABLE)
            context.set_details("Could not connect to redis: %s" % e)

    def GetDirectoryField(self, request, context):
        """ Get the directory record field for an ID and key

        Args:
            request (GetDirectoryFieldRequest): get directory field request

        Returns:
            DirectoryField: the requested field, or an empty DirectoryField
                with a non-OK status code set on the context on error
        """
        logging.debug("GetDirectoryField request received")
        self._print_grpc(request)
        if len(request.id) == 0:
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            context.set_details("ID argument cannot be empty in "
                                "GetDirectoryFieldRequest")
            # BUGFIX: previously a bare return handed None to grpc's
            # response serializer; build and return an empty message,
            # matching the empty-field_key branch below
            response = DirectoryField()
            self._print_grpc(response)
            return response
        if len(request.field_key) == 0:
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            context.set_details("Field key argument cannot be empty in "
                                "GetDirectoryFieldRequest")
            response = DirectoryField()
            self._print_grpc(response)
            return response

        # Lock Redis for requested key until get is complete
        try:
            with self._redis_dict.lock(request.id):
                if request.id not in self._redis_dict:
                    context.set_code(grpc.StatusCode.NOT_FOUND)
                    context.set_details("Record for ID %s was not found."
                                        % request.id)
                    return DirectoryField()
                record = self._redis_dict[request.id]
        except (RedisError, LockError) as e:
            logging.error(e)
            context.set_code(grpc.StatusCode.UNAVAILABLE)
            context.set_details("Could not connect to redis: %s" % e)
            response = DirectoryField()
            self._print_grpc(response)
            return response

        if request.field_key not in record.identifiers:
            context.set_code(grpc.StatusCode.NOT_FOUND)
            context.set_details("Field %s was not found in record for "
                                "ID %s" % (request.field_key, request.id))
            return DirectoryField()

        response = DirectoryField(key=request.field_key,
                                  value=record.identifiers[request.field_key])
        self._print_grpc(response)
        return response

    def GetAllDirectoryRecords(self, request, context):
        """ Get all directory records

        Args:
            request (Void): void

        Returns:
            AllDirectoryRecords: every live record, or a partial/empty
                response with UNAVAILABLE set on redis failure
        """
        logging.debug("GetAllDirectoryRecords request received")
        self._print_grpc(request)
        response = AllDirectoryRecords()
        try:
            redis_keys = self._redis_dict.keys()
        except RedisError as e:
            logging.error(e)
            context.set_code(grpc.StatusCode.UNAVAILABLE)
            context.set_details("Could not connect to redis: %s" % e)
            self._print_grpc(request)
            return response

        for key in redis_keys:
            try:
                with self._redis_dict.lock(key):
                    # Lookup may produce an exception if the key has been
                    # deleted between the call to __iter__ and lock
                    stored_record = self._redis_dict[key]
            except (RedisError, LockError) as e:
                logging.error(e)
                context.set_code(grpc.StatusCode.UNAVAILABLE)
                context.set_details("Could not connect to redis: %s" % e)
                self._print_grpc(response)
                return response
            except KeyError:
                continue
            directory_record = response.records.add()
            directory_record.id = key
            directory_record.location_history[:] = \
                stored_record.location_history
            for identifier_key in stored_record.identifiers:
                directory_record.fields[identifier_key] = \
                    stored_record.identifiers[identifier_key]

        self._print_grpc(response)
        return response

    def _print_grpc(self, message):
        """Log *message* as indented JSON when payload printing is enabled."""
        if self._print_grpc_payload:
            log_msg = "{} {}".format(message.DESCRIPTOR.full_name,
                                     MessageToJson(message))
            # add indentation
            padding = 2 * ' '
            log_msg = ''.join(
                "{}{}".format(padding, line)
                for line in log_msg.splitlines(True))
            log_msg = "GRPC message:\n{}".format(log_msg)
            logging.info(log_msg)