def test_position_list_update(json_client, data, redis_connection, reset_redis_data):
    data["events"][0]["fields"]["f"] = []
    create_model(json_client, data, redis_connection, reset_redis_data)

    data["events"][0] = {
        "type": "update",
        "fqid": "a/1",
        "list_fields": {"add": {"f": [42]}},
    }
    response = json_client.post(WRITE_URL, data)
    assert_response_code(response, 201)

    connection_handler = injector.get(ConnectionHandler)
    with connection_handler.get_connection_context():
        read_db = injector.get(ReadDatabase)
        read_db_model = read_db.get("a/1")
        assert read_db_model == {"f": [42], "meta_deleted": False, "meta_position": 2}
def test_master_singleton(self):
    injector.register(MasterServiceSingleton, MasterServiceSingleton)
    injector.register(ClientService, ClientServiceFactoryDirectSingleton)
    a = injector.get(ClientService)
    b = injector.get(ClientService)
    assert a != b
    assert a.master_service == b.master_service
def test_master_factory(self):
    injector.register(MasterServiceFactory, MasterServiceFactory)
    injector.register(ClientService, ClientServiceFactoryDirectFactory)
    a = injector.get(ClientService)
    b = injector.get(ClientService)
    assert a != b
    assert a.master_service != b.master_service
def test_client_factory_service_singleton_multi_get(self):
    injector.register(MasterService, MasterServiceSingleton)
    injector.register(ClientService, ClientServiceFactory)
    a = injector.get(ClientService)
    b = injector.get(ClientService)
    assert a != b
    assert a.master_service == b.master_service
def test_with_sql_dump(write, finalize, assert_model):
    connection_handler = injector.get(ConnectionHandler)
    with connection_handler.get_connection_context():
        with connection_handler.get_current_connection().cursor() as cursor:
            with open("tests/dump.sql") as f:
                cursor.execute(f.read(), [])

    migration_handler = injector.get(MigrationHandler)
    migration_handler.register_migrations(
        *MigrationWrapper.load_migrations("migrations")
    )
    migration_handler.finalize()
def _finalize(migration_module_name):
    migration_module = import_module(f"migrations.{migration_module_name}")

    class Migration(migration_module.Migration):
        target_migration_index = 2

    connection = injector.get(ConnectionHandler)
    with connection.get_connection_context():
        connection.execute("update positions set migration_index=%s", [1])

    migration_handler = injector.get(MigrationHandler)
    migration_handler.register_migrations(Migration)
    migration_handler.finalize()
def assert_no_model(fqid):
    connection_handler = injector.get(ConnectionHandler)
    with connection_handler.get_connection_context():
        # read from read db
        read_db = injector.get(ReadDatabase)
        with pytest.raises(ModelDoesNotExist):
            read_db.get(fqid)

        # assert that the last event is a delete, or that no event exists at all
        event_type = connection_handler.query_single_value(
            "select type from events where fqid=%s order by id desc limit 1", [fqid]
        )
        assert event_type in (EVENT_TYPES.DELETE, None)
def wrapper(*args, **kwargs):
    env_service: EnvironmentService = injector.get(EnvironmentService)
    RETRY_TIMEOUT = int(env_service.try_get("DATASTORE_RETRY_TIMEOUT") or 10)
    MAX_RETRIES = int(env_service.try_get("DATASTORE_MAX_RETRIES") or 3)
    tries = 0
    while True:
        try:
            return fn(*args, **kwargs)
        except DatabaseError as e:
            # this seems to be the only indication for a sudden connection break
            if (
                isinstance(e.base_exception, psycopg2.OperationalError)
                and e.base_exception.pgcode is None
            ):
                tries += 1
                if tries < MAX_RETRIES:
                    oe = e.base_exception
                    logger.info(
                        f"Retrying request to database because of the following "
                        f"error ({type(oe).__name__}, code {oe.pgcode}): {oe.pgerror}"
                    )
                else:
                    raise
            else:
                raise
        if RETRY_TIMEOUT:
            sleep(RETRY_TIMEOUT / 1000)
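# A hedged sketch of the decorator presumably enclosing `wrapper` above; the
# name `retry_on_db_failure` is an assumption, not taken from this snippet.
# `fn` is the wrapped function captured by the closure, so any
# database-touching function can be retried transparently on a dropped
# connection:
import functools

def retry_on_db_failure(fn):
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        ...  # retry loop from above, closing over `fn`
    return wrapper

@retry_on_db_failure
def write_to_db():
    ...  # any operation that may raise DatabaseError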
def test_init_error():
    os.environ["DATASTORE_MIN_CONNECTIONS"] = "1"
    injector.get(EnvironmentService).cache = {}
    connect = MagicMock()
    connect.side_effect = psycopg2.Error
    with patch("psycopg2.connect", new=connect):
        with pytest.raises(DatabaseError):
            PgConnectionHandlerService()
def test_read_db_is_updated_before_redis_fires(json_client, data):
    messaging = injector.get(Messaging)
    connection_handler = injector.get(ConnectionHandler)

    def assert_read_db_data(*args, **kwargs):
        connection = psycopg2.connect(**connection_handler.get_connection_params())
        with connection.cursor() as cursor:
            cursor.execute("select * from models where fqid = 'a/1'")
            result = cursor.fetchone()
            # assert the model exists
            assert result

    with patch.object(messaging, "handle_events", new=assert_read_db_data):
        response = json_client.post(WRITE_URL, data)
        assert_response_code(response, 201)
def reserve_ids(self, data: JSON) -> List[int]:
    try:
        parsed_data = ReserveIdsRequestJSON(**reserve_ids_schema(data))
    except fastjsonschema.JsonSchemaException as e:
        raise InvalidRequest(e.message)

    writer = injector.get(Writer)
    return writer.reserve_ids(parsed_data.collection, parsed_data.amount)
def test_single_delete(json_client, data, redis_connection, reset_redis_data):
    create_model(json_client, data, redis_connection, reset_redis_data)

    data["events"][0] = {"type": "delete", "fqid": "a/1"}
    response = json_client.post(WRITE_URL, data)
    assert_response_code(response, 201)
    assert_no_model("a/1")

    # assert the model is still in the lookup table, but marked as deleted
    connection_handler = injector.get(ConnectionHandler)
    with connection_handler.get_connection_context():
        # read from read db
        read_db: ReadDatabase = injector.get(ReadDatabase)
        model = read_db.get("a/1", [], DeletedModelsBehaviour.ONLY_DELETED)
        assert model == {"f": 1, "meta_deleted": True, "meta_position": 2}
        assert read_db.is_deleted("a/1")

    assert_modified_fields(redis_connection, {"a/1": ["f"]})
def _read_model(fqid, position=None):
    reader: Reader = injector.get(Reader)
    with reader.get_database_context():
        request = GetRequest(
            fqid=fqid,
            position=position,
            get_deleted_models=DeletedModelsBehaviour.ALL_MODELS,
        )
        return reader.get(request)
def write(self, data: JSON) -> None:
    if not isinstance(data, list):
        data = [data]

    write_requests = []
    for request in data:
        write_requests.append(self.build_write_request(request))

    writer = injector.get(Writer)
    writer.write(write_requests)
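# A hedged example of a payload accepted by `write` above, pieced together
# from the event shapes used by the tests in this section ("create" with
# fields, "update" with list_fields, and "delete", all addressed by fqid).
# The surrounding "user_id"/"information" metadata keys are assumptions
# about the request schema, not confirmed by this snippet:
example_payload = {
    "user_id": 1,
    "information": {},
    "events": [
        {"type": "create", "fqid": "a/1", "fields": {"f": 1}},
        {"type": "update", "fqid": "a/1", "list_fields": {"add": {"f": [42]}}},
        {"type": "delete", "fqid": "a/1"},
    ],
}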
def test_position_delete(json_client, data, redis_connection, reset_redis_data):
    create_model(json_client, data, redis_connection, reset_redis_data)

    data["events"][0] = {"type": "delete", "fqid": "a/1"}
    response = json_client.post(WRITE_URL, data)
    assert_response_code(response, 201)

    connection_handler = injector.get(ConnectionHandler)
    with connection_handler.get_connection_context():
        read_db = injector.get(ReadDatabase)
        read_db_model = read_db.get(
            "a/1", get_deleted_models=DeletedModelsBehaviour.ONLY_DELETED
        )
        assert read_db_model == {"f": 1, "meta_deleted": True, "meta_position": 2}
def _assert_model(fqid, expected, position=None):
    if position is None:
        assert read_model(fqid) == expected

        # get max position
        read_database: ReadDatabase = injector.get(ReadDatabase)
        with read_database.get_context():
            position = read_database.get_max_position()

    # build model and check
    assert read_model(fqid, position=position) == expected
def assert_model(fqid, model, position):
    connection_handler = injector.get(ConnectionHandler)
    with connection_handler.get_connection_context():
        # read from read db
        read_db = injector.get(ReadDatabase)
        read_db_model = read_db.get(fqid)
        model[META_DELETED] = False
        model[META_POSITION] = position
        assert read_db_model == model

        # build the model from the events and assert that the last event is not a delete
        built_model = read_db.build_model_ignore_deleted(fqid)
        del model[META_POSITION]
        del built_model[META_POSITION]
        assert built_model == model

        event_type = connection_handler.query_single_value(
            "select type from events where fqid=%s order by id desc limit 1", [fqid]
        )
        assert (
            isinstance(event_type, str)
            and len(event_type) > 0
            and event_type != EVENT_TYPES.DELETE
        )
def test_get_connection_different():
    os.environ["DATASTORE_MAX_CONNECTIONS"] = "2"
    injector.get(EnvironmentService).cache = {}
    handler = service(PgConnectionHandlerService)()

    def get_connection_from_thread():
        with concurrent.futures.ThreadPoolExecutor() as executor:
            future = executor.submit(handler.get_connection)
            return future.result()

    connection1 = get_connection_from_thread()
    connection2 = get_connection_from_thread()
    assert connection1 != connection2
def wrapper(*args, **kwargs):
    error_dict = None
    try:
        return fn(*args, **kwargs)
    except DatabaseError as e:
        return {"error": e.msg}, 500
    except InvalidFormat as e:
        error_dict = {
            "type": ERROR_CODES.INVALID_FORMAT,
            "msg": e.msg,
            "type_verbose": "INVALID_FORMAT",
        }
    except InvalidRequest as e:
        error_dict = {
            "type": ERROR_CODES.INVALID_REQUEST,
            "msg": e.msg,
            "type_verbose": "INVALID_REQUEST",
        }
    except ModelDoesNotExist as e:
        error_dict = {
            "type": ERROR_CODES.MODEL_DOES_NOT_EXIST,
            "fqid": e.fqid,
            "type_verbose": "MODEL_DOES_NOT_EXIST",
        }
    except ModelExists as e:
        error_dict = {
            "type": ERROR_CODES.MODEL_EXISTS,
            "fqid": e.fqid,
            "type_verbose": "MODEL_EXISTS",
        }
    except ModelNotDeleted as e:
        error_dict = {
            "type": ERROR_CODES.MODEL_NOT_DELETED,
            "fqid": e.fqid,
            "type_verbose": "MODEL_NOT_DELETED",
        }
    except ModelLocked as e:
        error_dict = {
            "type": ERROR_CODES.MODEL_LOCKED,
            "keys": e.keys,
            "type_verbose": "MODEL_LOCKED",
        }
    except Exception as e:
        # unexpected errors are logged and re-raised unchanged
        logger.error(f"Unexpected error: {e} ({type(e).__name__})")
        raise

    env_service = injector.get(EnvironmentService)
    if env_service.is_dev_mode():
        logger.debug(f"HTTP error 400: {error_dict}")
    return {"error": error_dict}, 400
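# The same decorator pattern sketched after the retry wrapper applies here:
# `fn` is the wrapped view function. For illustration only, the body and
# status a client would receive when the handlers above catch a ModelLocked
# (the locked key is an invented example value):
example_locked_response = (
    {
        "error": {
            "type": ERROR_CODES.MODEL_LOCKED,
            "keys": ["a/1/f"],
            "type_verbose": "MODEL_LOCKED",
        }
    },
    400,
)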
def test_request_misconfigured_migrations(self, gbmi: Any) -> None:
    write_request = self.get_create_request("topic/1", {"title": "dummy"})
    write_request.migration_index = 6
    with self.datastore.get_database_context():
        self.datastore.write(write_request)
    gbmi.return_value = 5

    response = self.request("dummy", {})
    self.assert_status_code(response, 400)
    self.assertIn(
        "Migration indices do not match: Datastore has 6 and the backend has 5",
        response.json["message"],
    )

    # reset the cached migration index so that subsequent tests are unaffected
    read_db = injector.get(ReadDatabase)
    assert isinstance(read_db, SqlReadDatabaseBackendService)
    read_db.current_migration_index = MIGRATION_INDEX_NOT_INITIALIZED
def test_singleton_and_factory(self):
    for client_service, master_service in (
        (ClientServiceFactory, MasterServiceFactory),
        (ClientServiceFactory, MasterServiceSingleton),
        (ClientServiceSingleton, MasterServiceSingleton),
    ):
        injector.register(MasterService, master_service)
        injector.register(ClientService, client_service)
        cs = injector.get(ClientService)
        assert type(cs) == client_service
        assert type(cs.master_service) == master_service
        assert cs.value == "default"
        assert cs.another_value == "default2"
        assert cs.init_master_service == cs.master_service
def assert_migration_index() -> None:
    connection = injector.get(ConnectionHandler)
    with connection.get_connection_context():
        if connection.query_single_value("select count(*) from positions", []) == 0:
            return  # Datastore is empty; nothing to check.

    datastore_migration_index = get_datastore_migration_index()
    if datastore_migration_index == -1:
        return  # Datastore is up-to-date; nothing to do.

    backend_migration_index = get_backend_migration_index()
    if backend_migration_index > datastore_migration_index:
        raise MissingMigrations(
            f"Missing {backend_migration_index - datastore_migration_index} "
            f"migrations to apply."
        )
    if backend_migration_index < datastore_migration_index:
        raise MisconfiguredMigrations(
            f"Migration indices do not match: Datastore has "
            f"{datastore_migration_index} and the backend has "
            f"{backend_migration_index}"
        )
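# A worked example of the comparison above, matching the misconfiguration
# test earlier in this section: with a datastore index of 6 and a backend
# index of 5, the backend lags behind the data it reads, so
# MisconfiguredMigrations is raised (the reverse would raise
# MissingMigrations).
datastore_migration_index = 6
backend_migration_index = 5
assert backend_migration_index < datastore_migration_index  # -> MisconfiguredMigrations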
def handle_request(self, route: Route, data: JSON) -> Dict:
    """
    A generic handler for all requests. Parses the request into a Python
    object according to the route configuration and executes the matching
    route handler.
    """
    try:
        route_configuration = route_configurations[route]
    except KeyError:
        raise BadCodingError("Invalid route metadata: " + route)

    logger.info(f"{route.upper()}-request: {data}")

    try:
        request_data = route_configuration.schema(data)
    except fastjsonschema.JsonSchemaException as e:
        if route_configuration.schema_error_handler:
            route_configuration.schema_error_handler(e)
        raise InvalidRequest(e.message)

    try:
        request_object = from_dict(
            route_configuration.request_class,
            request_data,
            Config(check_types=False),
        )
    except (TypeError, MissingValueError) as e:
        raise BadCodingError("Invalid data to initialize class\n" + str(e))

    reader = injector.get(Reader)
    route_handler = getattr(reader, route)
    if route_configuration.dev_only:
        route_handler = dev_only_route(route_handler)
    with reader.get_database_context():
        return route_handler(request_object)
def connection(provide_di):
    yield injector.get(ConnectionHandler)
def test_not_found_in_non_dev(json_client):
    injector.get(EnvironmentService).set(DATASTORE_DEV_MODE_ENVIRONMENT_VAR, "0")
    response = json_client.post(TRUNCATE_DB_URL, {})
    assert_response_code(response, 404)
def env_service(reset_di):  # noqa
    injector.register(EnvironmentService, EnvironmentService)
    yield injector.get(EnvironmentService)
def get_datastore_migration_index() -> int:
    read_db = injector.get(ReadDatabase)
    with read_db.get_context():
        datastore_migration_index = read_db.get_current_migration_index()
    return datastore_migration_index
def environment_service(reset_di):  # noqa
    injector.register(EnvironmentService, EnvironmentService)
    env_service = injector.get(EnvironmentService)
    yield env_service
    env_service.cache = {}
import sys
from urllib.error import URLError

from datastore.shared.di import injector
from datastore.shared.postgresql_backend import ConnectionHandler
from datastore.shared.services import EnvironmentService
from datastore.shared.util import build_fqid
from datastore.writer.app import register_services
from datastore.writer.core import (
    BaseRequestEvent,
    RequestCreateEvent,
    Writer,
    WriteRequest,
)

register_services()
connection: ConnectionHandler = injector.get(ConnectionHandler)
env_service: EnvironmentService = injector.get(EnvironmentService)
writer: Writer = injector.get(Writer)

with connection.get_connection_context():
    events_count = connection.query_single_value(
        "SELECT COUNT(*) FROM events LIMIT 1", []
    )
    if events_count:
        if len(sys.argv) > 1 and sys.argv[1] == "-f":
            print("Warning: database is not empty! Executing anyway...")
        else:
            print("Error: Some events are already present, aborting.")
            sys.exit(1)

path = env_service.get("DATASTORE_INITIAL_DATA_FILE")
print(f"Loading data: {path}")
def shutdown_service(reset_di):  # noqa
    injector.register(ShutdownService, ShutdownService)
    yield injector.get(ShutdownService)