def global_changesets_return_values(cmd):
    """Return canned global-changeset responses keyed on cmd.checkpoint.

    Checkpoint 3 yields a fixed page of three stream3 changesets; any
    other checkpoint yields an empty page anchored at checkpoint 6.
    """
    if cmd.checkpoint != 3:
        return Response(http_status=200, body={
            "checkpoint": 6,
            "limit": 10,
            "changesets": [],
            "next_checkpoint": 6
        })
    fixture = [
        {
            "stream_id": "stream3",
            "changeset_id": 1,
            "events": [{"type": "init"}, {"type": "set"}],
            "metadata": {},
            "checkpoint": 3
        },
        {
            "stream_id": "stream3",
            "changeset_id": 2,
            "events": [{"type": "init"}, {"type": "set"}, {"type": "update"}],
            "metadata": {},
            "checkpoint": 4
        },
        {
            "stream_id": "stream3",
            "changeset_id": 3,
            "events": [{"type": "modify"}, {"type": "delete"}],
            "metadata": {},
            "checkpoint": 5
        },
    ]
    return Response(http_status=200, body={
        "checkpoint": 0,
        "limit": 10,
        "changesets": fixture,
        "next_checkpoint": 6
    })
def execute(self, cmd):
    """Return aggregate stream statistics, or 404 when none are generated yet."""
    state = self.db.get_analysis_state()
    if not state:
        return Response(
            http_status=404,
            body={"error": "Statistics are not yet generated"})
    statistics = {
        'total_streams': state.total_streams,
        'total_changesets': state.total_changesets,
        'total_events': state.total_events,
        'max_stream_length': state.max_stream_length,
        'statistics_version': state.version
    }
    return Response(http_status=200, body=statistics)
def missing_stream_id():
    """400 response for a request that omits the required stream_id."""
    error_body = {
        "error": "MISSING_STREAM_ID",
        "message": 'stream_id is a required value'
    }
    return Response(http_status=400, body=error_body)
def invalid_events_checkpoint_value(checkpoint_string):
    """400 response for a malformed events-checkpoint query value."""
    error_body = {
        "error": "INVALID_CHECKPOINT",
        "message": f'"{checkpoint_string}" is an invalid checkpoint value. Set a valid checkpoint(e.g. "42.1").'
    }
    return Response(http_status=400, body=error_body)
def execute(self, cmd):
    """Serve one page of the global changeset feed starting at cmd.checkpoint."""
    limit = cmd.limit or self.default_limit
    rows = self.db.fetch_global_changesets(cmd.checkpoint, limit)
    changesets = []
    for row in rows:
        changesets.append({
            "stream_id": row.stream_id,
            "changeset_id": row.changeset_id,
            "events": row.events,
            "metadata": row.metadata,
            "checkpoint": self.checkpoint_calc.to_checkpoint(
                row.page, row.page_item)
        })
    if changesets:
        # Resume just past the highest checkpoint on this page.
        next_checkpoint = max(c["checkpoint"] for c in changesets) + 1
    else:
        next_checkpoint = cmd.checkpoint
    return Response(http_status=200, body={
        "checkpoint": cmd.checkpoint,
        "limit": limit,
        "changesets": changesets,
        "next_checkpoint": next_checkpoint
    })
def invalid_limit_value(limit):
    """400 response for a limit that is not a positive integer."""
    error_body = {
        "error": "INVALID_LIMIT",
        "message": f'"{limit}" is an invalid limit value. Expected an integer value greater than 0.'
    }
    return Response(http_status=400, body=error_body)
def execute(self, cmd):
    """Return a stream's events, optionally bounded by from/to event ids."""
    changesets = self.db.fetch_stream_by_events(
        cmd.stream_id, from_event=cmd.from_event, to_event=cmd.to_event)
    events = []
    for changeset in changesets:
        for offset, payload in enumerate(changeset.events):
            event_id = changeset.first_event_id + offset
            # A falsy bound (None) disables that side of the filter.
            within_lower = not cmd.from_event or event_id >= cmd.from_event
            within_upper = not cmd.to_event or event_id <= cmd.to_event
            if within_lower and within_upper:
                events.append({"id": event_id, "data": payload})
    if not events:
        # Distinguish "no events in range" from "stream doesn't exist".
        if not self.db.fetch_last_commit(cmd.stream_id):
            return self.stream_not_found(cmd.stream_id)
    return Response(
        http_status=200,
        body={
            "stream_id": cmd.stream_id,
            "events": events
        })
def invalid_checkpoint_value(checkpoint):
    """400 response for a checkpoint that is not a positive integer."""
    error_body = {
        "error": "INVALID_CHECKPOINT",
        "message": f'"{checkpoint}" is an invalid checkpoint value. Expected a positive integer value.'
    }
    return Response(http_status=400, body=error_body)
def stream_not_found(self, stream_id):
    """404 response for a stream id that has no commits."""
    error_body = {
        "stream_id": stream_id,
        "error": "STREAM_NOT_FOUND",
        "message": f'The specified stream({stream_id}) doesn\'t exist'
    }
    return Response(http_status=404, body=error_body)
def invalid_filtering_values(stream_id, from_changeset, to_changeset, filter_type):
    """400 response when the 'from' filter bound exceeds the 'to' bound."""
    error_body = {
        "stream_id": stream_id,
        "error": f"INVALID_{filter_type}_FILTERING_PARAMS",
        "message": f'The higher boundary cannot be lower than the lower boundary: {from_changeset}(from) > {to_changeset}(to)'
    }
    return Response(http_status=400, body=error_body)
def invalid_filtering_values_type(stream_id, filter_type):
    """400 response when the from/to filter params are not positive integers."""
    error_body = {
        "stream_id": stream_id,
        "error": f"INVALID_{filter_type}_FILTERING_PARAMS",
        "message": 'The filtering params(from, to) have to be positive integer values'
    }
    return Response(http_status=400, body=error_body)
def invalid_expected_event_id(stream_id, expected_last_event_id):
    """400 response for a malformed expected_last_event query value."""
    error_body = {
        "stream_id": stream_id,
        "error": "INVALID_EXPECTED_EVENT_ID",
        "message": f'The specified expected event id("{expected_last_event_id}") is invalid. Expected a positive integer.'
    }
    return Response(http_status=400, body=error_body)
def missing_expected_changeset_exception(self, stream_id, lock_type, last_changeset, last_known):
    """400 response when the expected changeset/event is ahead of the stream head."""
    error_body = {
        "stream_id": stream_id,
        "error": "INVALID_EXPECTED_CHANGESET_ID",
        "message": f'The specified expected {lock_type}({last_changeset}) doesn\'t exist. The "{stream_id}" stream\'s most recent {lock_type} is {last_known}.'
    }
    return Response(http_status=400, body=error_body)
def _parse_optional_non_negative_int(raw):
    """Parse an optional query-string integer.

    Returns a (value, valid) pair: (None, True) when raw is absent or
    empty, (int, True) for a non-negative integer, (None, False) when
    raw is present but not a non-negative integer.
    """
    if raw is None or raw == "":
        return None, True
    try:
        value = int(raw)
    except ValueError:
        return None, False
    if value < 0:
        return None, False
    return value, True


def parse_commit_request(event, context):
    """Build a Commit command from an API Gateway Lambda event.

    Validates stream_id and the optional concurrency-control query
    params (expected_last_changeset / expected_last_event), returning an
    error Response on bad input. When neither concurrency param is given
    the commit defaults to expecting an empty stream (changeset 0);
    specifying both is rejected.
    """
    query_string = event.get("queryStringParameters") or {}
    # pathParameters can be missing or None (e.g. test/direct invocation);
    # treat that like an absent stream_id instead of raising, mirroring
    # the queryStringParameters guard above.
    stream_id = (event.get("pathParameters") or {}).get("stream_id")
    if not stream_id:
        return missing_stream_id()

    raw_changeset = query_string.get("expected_last_changeset")
    expected_last_changeset, valid = _parse_optional_non_negative_int(
        raw_changeset)
    if not valid:
        return invalid_expected_changeset_id(stream_id, raw_changeset)

    raw_event = query_string.get("expected_last_event")
    expected_last_event, valid = _parse_optional_non_negative_int(raw_event)
    if not valid:
        return invalid_expected_event_id(stream_id, raw_event)

    if expected_last_changeset is None and expected_last_event is None:
        # No concurrency hint at all: require the stream to be empty.
        expected_last_changeset = 0
    if expected_last_changeset is not None and expected_last_event is not None:
        return Response(
            http_status=400,
            body={
                "stream_id": stream_id,
                "error": "BOTH_EXPECTED_CHANGESET_AND_EVENT_ARE_SET",
                "message": 'Cannot use both "last_changeset_id" and "last_event_id" for concurrency management. Specify only one value.'
            })

    body = json.loads(event["body"])
    return Commit(
        stream_id=stream_id,
        expected_last_changeset=expected_last_changeset,
        expected_last_event=expected_last_event,
        events=body["events"],
        metadata=body.get("metadata", {})
    )
def execute(self, cmd):
    """Append a commit to a stream under optimistic concurrency control.

    cmd carries stream_id, events, metadata and the concurrency
    expectations expected_last_changeset / expected_last_event; an
    expectation of 0 asserts the stream does not exist yet.
    """
    logger.debug(
        f'expected last changeset id {cmd.expected_last_changeset}')
    logger.debug(f'expected last event id {cmd.expected_last_event}')
    commit = None
    if cmd.expected_last_changeset == 0 or cmd.expected_last_event == 0:
        # Expectation "0" means the caller asserts a brand-new stream.
        commit = make_initial_commit(cmd.stream_id, cmd.events, cmd.metadata)
    else:
        # NOTE(review): fetch_last_commit plausibly returns None for an
        # unknown stream (other handlers in this file treat None as
        # stream-not-found); the attribute reads below would then raise
        # AttributeError — confirm upstream validation guarantees the
        # stream exists on this path.
        prev_commit = self.db.fetch_last_commit(cmd.stream_id)
        # Expected changeset behind the stream head -> concurrency conflict.
        if cmd.expected_last_changeset and \
                prev_commit.changeset_id > cmd.expected_last_changeset:
            return self.concurrency_exception(cmd.stream_id,
                                              cmd.expected_last_changeset,
                                              cmd.expected_last_event)
        # Expected changeset ahead of the stream head -> it doesn't exist.
        if cmd.expected_last_changeset and \
                prev_commit.changeset_id < cmd.expected_last_changeset:
            return self.missing_expected_changeset_exception(
                cmd.stream_id, 'changeset', cmd.expected_last_changeset,
                prev_commit.changeset_id)
        # Same pair of checks, keyed on the last event id instead.
        if cmd.expected_last_event and \
                prev_commit.last_event_id > cmd.expected_last_event:
            return self.concurrency_exception(cmd.stream_id,
                                              cmd.expected_last_changeset,
                                              cmd.expected_last_event)
        if cmd.expected_last_event and \
                prev_commit.last_event_id < cmd.expected_last_event:
            return self.missing_expected_changeset_exception(
                cmd.stream_id, 'event', cmd.expected_last_event,
                prev_commit.last_event_id)
        commit = make_next_commit(prev_commit, cmd.events, cmd.metadata)
    try:
        # The storage layer may still detect a race at append time.
        self.db.append(commit)
    except ConcurrencyException:
        return self.concurrency_exception(cmd.stream_id,
                                          cmd.expected_last_changeset,
                                          cmd.expected_last_event)
    return Response(http_status=200, body={
        "stream_id": commit.stream_id,
        "changeset_id": commit.changeset_id
    })
def execute(self, cmd):
    """Return a stream's changesets, optionally bounded by changeset ids."""
    fetched = self.db.fetch_stream_changesets(
        cmd.stream_id,
        from_changeset=cmd.from_changeset,
        to_changeset=cmd.to_changeset)
    changesets = [
        {
            "changeset_id": item.changeset_id,
            "events": item.events,
            "metadata": item.metadata
        }
        for item in fetched
    ]
    if not changesets:
        # Distinguish "empty range" from "stream doesn't exist".
        if not self.db.fetch_last_commit(cmd.stream_id):
            return self.stream_not_found(cmd.stream_id)
    return Response(http_status=200, body={
        "stream_id": cmd.stream_id,
        "changesets": changesets
    })
def concurrency_exception(self, stream_id, expected_last_changeset,
                          expected_last_event):
    """Build the 409 OPTIMISTIC_CONCURRENCY_EXCEPTION response.

    Includes every changeset appended after the expected last
    changeset/event so the caller can review what it missed.
    """
    lock_by = None
    lock_value = None
    pending = None
    if expected_last_changeset:
        lock_by = "changeset"
        lock_value = expected_last_changeset
        pending = self.db.fetch_stream_changesets(
            stream_id, from_changeset=expected_last_changeset + 1)
    if expected_last_event:
        # Deliberately not elif: an event expectation takes precedence
        # when both are set, matching the original check order.
        lock_by = "event"
        lock_value = expected_last_event
        pending = self.db.fetch_stream_by_events(
            stream_id, from_event=expected_last_event + 1)
    forthcoming = [
        {
            "changeset_id": item.changeset_id,
            "events": item.events,
            "metadata": item.metadata
        }
        for item in pending
    ]
    return Response(
        http_status=409,
        body={
            "stream_id": stream_id,
            "error": "OPTIMISTIC_CONCURRENCY_EXCEPTION",
            "forthcoming_changesets": forthcoming,
            "message": f'The expected last {lock_by} ({lock_value}) is outdated, review the {lock_by}(s) appended after it.'
        })
def execute(self, event):
    """Catch-all handler: respond 404 for any unrecognized endpoint."""
    not_found = {"message": "Invalid endpoint"}
    return Response(http_status=404, body=not_found)
def execute(self, cmd):
    """Report the service's API version."""
    version_body = {"version": "0.0.1"}
    return Response(http_status=200, body=version_body)